diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 6a77a8b8e8e3..678e2c6a3724 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -29,7 +29,7 @@ jobs: permissions: pull-requests: write # for googleapis/code-suggester name: Update API List PR - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # don't run the workflow on forks of googleapis/google-cloud-python if: ${{github.repository == 'googleapis/google-cloud-python'}} steps: diff --git a/.kokoro/release-single.sh b/.kokoro/release-single.sh index 5665c4828a93..f917f8ef66d0 100755 --- a/.kokoro/release-single.sh +++ b/.kokoro/release-single.sh @@ -21,7 +21,7 @@ set -eo pipefail pwd # Move into the package, build the distribution and upload. -TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-1") +TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-2") python3 setup.py sdist bdist_wheel twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg index c9b8a36f766d..830be65dde19 100644 --- a/.kokoro/release/common.cfg +++ b/.kokoro/release/common.cfg @@ -28,7 +28,7 @@ before_action { fetch_keystore { keystore_resource { keystore_config_id: 73713 - keyname: "google-cloud-pypi-token-keystore-1" + keyname: "google-cloud-pypi-token-keystore-2" } } } diff --git a/.release-please-manifest.json b/.release-please-manifest.json index a695d96468bb..c66fb322589a 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,10 +1,11 @@ { - "packages/google-ads-admanager": "0.1.2", - "packages/google-ai-generativelanguage": "0.6.9", + "packages/google-ads-admanager": "0.2.0", + "packages/google-ads-marketingplatform-admin": "0.1.0", + "packages/google-ai-generativelanguage": "0.6.10", "packages/google-analytics-admin": "0.23.0", - "packages/google-analytics-data": "0.18.11", + "packages/google-analytics-data": 
"0.18.12", "packages/google-apps-card": "0.1.4", - "packages/google-apps-chat": "0.1.9", + "packages/google-apps-chat": "0.1.12", "packages/google-apps-events-subscriptions": "0.1.2", "packages/google-apps-meet": "0.1.8", "packages/google-apps-script-type": "0.3.10", @@ -17,17 +18,17 @@ "packages/google-cloud-api-keys": "0.5.11", "packages/google-cloud-apigee-connect": "1.9.5", "packages/google-cloud-apigee-registry": "0.6.11", - "packages/google-cloud-apihub": "0.1.0", + "packages/google-cloud-apihub": "0.2.0", "packages/google-cloud-appengine-admin": "1.11.5", "packages/google-cloud-appengine-logging": "1.4.5", "packages/google-cloud-apphub": "0.1.2", "packages/google-cloud-artifact-registry": "1.11.5", - "packages/google-cloud-asset": "3.26.3", + "packages/google-cloud-asset": "3.26.4", "packages/google-cloud-assured-workloads": "1.12.5", "packages/google-cloud-automl": "2.13.5", - "packages/google-cloud-backupdr": "0.1.3", + "packages/google-cloud-backupdr": "0.1.4", "packages/google-cloud-bare-metal-solution": "1.7.5", - "packages/google-cloud-batch": "0.17.27", + "packages/google-cloud-batch": "0.17.29", "packages/google-cloud-beyondcorp-appconnections": "0.4.11", "packages/google-cloud-beyondcorp-appconnectors": "0.4.11", "packages/google-cloud-beyondcorp-appgateways": "0.4.11", @@ -38,24 +39,24 @@ "packages/google-cloud-bigquery-connection": "1.15.5", "packages/google-cloud-bigquery-data-exchange": "0.5.13", "packages/google-cloud-bigquery-datapolicies": "0.6.8", - "packages/google-cloud-bigquery-datatransfer": "3.15.7", + "packages/google-cloud-bigquery-datatransfer": "3.16.0", "packages/google-cloud-bigquery-logging": "1.4.5", "packages/google-cloud-bigquery-migration": "0.11.9", "packages/google-cloud-bigquery-reservation": "1.13.5", "packages/google-cloud-billing": "1.13.6", "packages/google-cloud-billing-budgets": "1.14.5", "packages/google-cloud-binary-authorization": "1.10.5", - "packages/google-cloud-build": "3.24.2", + 
"packages/google-cloud-build": "3.25.0", "packages/google-cloud-certificate-manager": "1.7.2", - "packages/google-cloud-channel": "1.18.5", - "packages/google-cloud-cloudcontrolspartner": "0.1.3", + "packages/google-cloud-channel": "1.19.0", + "packages/google-cloud-cloudcontrolspartner": "0.2.0", "packages/google-cloud-cloudquotas": "0.1.10", - "packages/google-cloud-commerce-consumer-procurement": "0.1.7", + "packages/google-cloud-commerce-consumer-procurement": "0.1.8", "packages/google-cloud-common": "1.3.5", "packages/google-cloud-compute": "1.19.2", "packages/google-cloud-confidentialcomputing": "0.4.11", "packages/google-cloud-config": "0.1.11", - "packages/google-cloud-contact-center-insights": "1.17.5", + "packages/google-cloud-contact-center-insights": "1.18.0", "packages/google-cloud-container": "2.51.0", "packages/google-cloud-containeranalysis": "2.14.5", "packages/google-cloud-contentwarehouse": "0.7.9", @@ -67,17 +68,17 @@ "packages/google-cloud-dataform": "0.5.11", "packages/google-cloud-datalabeling": "1.10.5", "packages/google-cloud-dataplex": "2.2.2", - "packages/google-cloud-dataproc": "5.11.0", + "packages/google-cloud-dataproc": "5.13.0", "packages/google-cloud-dataproc-metastore": "1.15.5", "packages/google-cloud-datastream": "1.9.5", - "packages/google-cloud-deploy": "2.0.1", + "packages/google-cloud-deploy": "2.1.0", "packages/google-cloud-developerconnect": "0.1.2", - "packages/google-cloud-dialogflow": "2.31.0", + "packages/google-cloud-dialogflow": "2.33.0", "packages/google-cloud-dialogflow-cx": "1.35.0", "packages/google-cloud-discoveryengine": "0.12.2", - "packages/google-cloud-dlp": "3.22.0", + "packages/google-cloud-dlp": "3.23.0", "packages/google-cloud-dms": "1.9.5", - "packages/google-cloud-documentai": "2.31.0", + "packages/google-cloud-documentai": "2.33.0", "packages/google-cloud-domains": "1.7.5", "packages/google-cloud-edgecontainer": "0.5.11", "packages/google-cloud-edgenetwork": "0.1.11", @@ -87,18 +88,18 @@ 
"packages/google-cloud-eventarc-publishing": "0.6.11", "packages/google-cloud-filestore": "1.9.5", "packages/google-cloud-functions": "1.17.0", - "packages/google-cloud-gdchardwaremanagement": "0.1.3", + "packages/google-cloud-gdchardwaremanagement": "0.1.4", "packages/google-cloud-gke-backup": "0.5.11", "packages/google-cloud-gke-connect-gateway": "0.9.0", "packages/google-cloud-gke-hub": "1.14.2", - "packages/google-cloud-gke-multicloud": "0.6.12", + "packages/google-cloud-gke-multicloud": "0.6.13", "packages/google-cloud-gsuiteaddons": "0.3.10", "packages/google-cloud-iam": "2.15.2", "packages/google-cloud-iam-logging": "1.3.5", "packages/google-cloud-iap": "1.13.5", "packages/google-cloud-ids": "1.7.5", - "packages/google-cloud-kms": "2.24.2", - "packages/google-cloud-kms-inventory": "0.2.8", + "packages/google-cloud-kms": "3.0.0", + "packages/google-cloud-kms-inventory": "0.2.9", "packages/google-cloud-language": "2.14.0", "packages/google-cloud-life-sciences": "0.9.12", "packages/google-cloud-managed-identities": "1.9.5", @@ -109,17 +110,18 @@ "packages/google-cloud-monitoring": "2.22.2", "packages/google-cloud-monitoring-dashboards": "2.15.3", "packages/google-cloud-monitoring-metrics-scopes": "1.6.5", - "packages/google-cloud-netapp": "0.3.13", + "packages/google-cloud-netapp": "0.3.14", "packages/google-cloud-network-connectivity": "2.4.5", "packages/google-cloud-network-management": "1.18.0", "packages/google-cloud-network-security": "0.9.11", "packages/google-cloud-network-services": "0.5.14", "packages/google-cloud-notebooks": "1.10.5", "packages/google-cloud-optimization": "1.8.5", - "packages/google-cloud-orchestration-airflow": "1.13.1", + "packages/google-cloud-oracledatabase": "0.1.0", + "packages/google-cloud-orchestration-airflow": "1.14.0", "packages/google-cloud-os-config": "1.17.5", "packages/google-cloud-os-login": "2.14.6", - "packages/google-cloud-parallelstore": "0.2.3", + "packages/google-cloud-parallelstore": "0.2.4", 
"packages/google-cloud-phishing-protection": "1.11.5", "packages/google-cloud-policy-troubleshooter": "1.11.5", "packages/google-cloud-policysimulator": "0.1.8", @@ -137,7 +139,7 @@ "packages/google-cloud-resource-manager": "1.12.5", "packages/google-cloud-resource-settings": "1.9.6", "packages/google-cloud-retail": "1.21.2", - "packages/google-cloud-run": "0.10.8", + "packages/google-cloud-run": "0.10.9", "packages/google-cloud-scheduler": "2.13.5", "packages/google-cloud-secret-manager": "2.20.2", "packages/google-cloud-securesourcemanager": "0.1.8", @@ -177,17 +179,18 @@ "packages/google-cloud-workstations": "0.5.8", "packages/google-geo-type": "0.3.9", "packages/google-maps-addressvalidation": "0.3.13", + "packages/google-maps-areainsights": "0.1.0", "packages/google-maps-fleetengine": "0.2.2", - "packages/google-maps-fleetengine-delivery": "0.2.3", + "packages/google-maps-fleetengine-delivery": "0.2.4", "packages/google-maps-mapsplatformdatasets": "0.4.2", - "packages/google-maps-places": "0.1.17", - "packages/google-maps-routeoptimization": "0.1.2", + "packages/google-maps-places": "0.1.18", + "packages/google-maps-routeoptimization": "0.1.4", "packages/google-maps-routing": "0.6.10", "packages/google-maps-solar": "0.1.2", "packages/google-shopping-css": "0.1.8", - "packages/google-shopping-merchant-accounts": "0.1.3", + "packages/google-shopping-merchant-accounts": "0.2.0", "packages/google-shopping-merchant-conversions": "0.1.3", - "packages/google-shopping-merchant-datasources": "0.1.2", + "packages/google-shopping-merchant-datasources": "0.1.3", "packages/google-shopping-merchant-inventories": "0.1.9", "packages/google-shopping-merchant-lfp": "0.1.3", "packages/google-shopping-merchant-notifications": "0.1.2", diff --git a/CHANGELOG.md b/CHANGELOG.md index 9f4e47fb21b5..3c4f1ec4af65 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,11 +3,12 @@ Please refer to each API's `CHANGELOG.md` file under the `packages/` directory Changelogs ----- - 
[google-ads-admanager==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ads-admanager/CHANGELOG.md) -- [google-ai-generativelanguage==0.6.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ai-generativelanguage/CHANGELOG.md) +- [google-ads-marketingplatform-admin==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ads-marketingplatform-admin/CHANGELOG.md) +- [google-ai-generativelanguage==0.6.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ai-generativelanguage/CHANGELOG.md) - [google-analytics-admin==0.23.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-analytics-admin/CHANGELOG.md) -- [google-analytics-data==0.18.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-analytics-data/CHANGELOG.md) +- [google-analytics-data==0.18.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-analytics-data/CHANGELOG.md) - [google-apps-card==0.1.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-card/CHANGELOG.md) -- [google-apps-chat==0.1.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-chat/CHANGELOG.md) +- [google-apps-chat==0.1.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-chat/CHANGELOG.md) - [google-apps-events-subscriptions==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-events-subscriptions/CHANGELOG.md) - [google-apps-meet==0.1.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-meet/CHANGELOG.md) - [google-apps-script-type==0.3.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-script-type/CHANGELOG.md) @@ -15,22 +16,22 @@ Changelogs - 
[google-cloud-access-approval==1.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-access-approval/CHANGELOG.md) - [google-cloud-advisorynotifications==0.3.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-advisorynotifications/CHANGELOG.md) - [google-cloud-alloydb-connectors==0.1.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-alloydb-connectors/CHANGELOG.md) -- [google-cloud-alloydb==0.3.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-alloydb/CHANGELOG.md) +- [google-cloud-alloydb==0.3.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-alloydb/CHANGELOG.md) - [google-cloud-api-gateway==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-api-gateway/CHANGELOG.md) - [google-cloud-api-keys==0.5.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-api-keys/CHANGELOG.md) - [google-cloud-apigee-connect==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apigee-connect/CHANGELOG.md) - [google-cloud-apigee-registry==0.6.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apigee-registry/CHANGELOG.md) -- [google-cloud-apihub==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apihub/CHANGELOG.md) +- [google-cloud-apihub==0.2.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apihub/CHANGELOG.md) - [google-cloud-appengine-admin==1.11.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-appengine-admin/CHANGELOG.md) - [google-cloud-appengine-logging==1.4.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-appengine-logging/CHANGELOG.md) - 
[google-cloud-apphub==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apphub/CHANGELOG.md) - [google-cloud-artifact-registry==1.11.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-artifact-registry/CHANGELOG.md) -- [google-cloud-asset==3.26.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-asset/CHANGELOG.md) +- [google-cloud-asset==3.26.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-asset/CHANGELOG.md) - [google-cloud-assured-workloads==1.12.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-assured-workloads/CHANGELOG.md) - [google-cloud-automl==2.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-automl/CHANGELOG.md) - [google-cloud-backupdr==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-backupdr/CHANGELOG.md) - [google-cloud-bare-metal-solution==1.7.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bare-metal-solution/CHANGELOG.md) -- [google-cloud-batch==0.17.26](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-batch/CHANGELOG.md) +- [google-cloud-batch==0.17.28](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-batch/CHANGELOG.md) - [google-cloud-beyondcorp-appconnections==0.4.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appconnections/CHANGELOG.md) - [google-cloud-beyondcorp-appconnectors==0.4.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appconnectors/CHANGELOG.md) - [google-cloud-beyondcorp-appgateways==0.4.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appgateways/CHANGELOG.md) @@ -41,17 +42,17 @@ Changelogs - 
[google-cloud-bigquery-connection==1.15.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-connection/CHANGELOG.md) - [google-cloud-bigquery-data-exchange==0.5.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-data-exchange/CHANGELOG.md) - [google-cloud-bigquery-datapolicies==0.6.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-datapolicies/CHANGELOG.md) -- [google-cloud-bigquery-datatransfer==3.15.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md) +- [google-cloud-bigquery-datatransfer==3.15.7](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md) - [google-cloud-bigquery-logging==1.4.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-logging/CHANGELOG.md) - [google-cloud-bigquery-migration==0.11.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-migration/CHANGELOG.md) - [google-cloud-bigquery-reservation==1.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-reservation/CHANGELOG.md) - [google-cloud-billing-budgets==1.14.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-billing-budgets/CHANGELOG.md) - [google-cloud-billing==1.13.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-billing/CHANGELOG.md) - [google-cloud-binary-authorization==1.10.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-binary-authorization/CHANGELOG.md) -- [google-cloud-build==3.24.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-build/CHANGELOG.md) +- 
[google-cloud-build==3.25.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-build/CHANGELOG.md) - [google-cloud-certificate-manager==1.7.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-certificate-manager/CHANGELOG.md) - [google-cloud-channel==1.18.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-channel/CHANGELOG.md) -- [google-cloud-cloudcontrolspartner==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-cloudcontrolspartner/CHANGELOG.md) +- [google-cloud-cloudcontrolspartner==0.2.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-cloudcontrolspartner/CHANGELOG.md) - [google-cloud-cloudquotas==0.1.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-cloudquotas/CHANGELOG.md) - [google-cloud-commerce-consumer-procurement==0.1.7](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-commerce-consumer-procurement/CHANGELOG.md) - [google-cloud-common==1.3.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-common/CHANGELOG.md) @@ -59,7 +60,7 @@ Changelogs - [google-cloud-confidentialcomputing==0.4.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-confidentialcomputing/CHANGELOG.md) - [google-cloud-config==0.1.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-config/CHANGELOG.md) - [google-cloud-contact-center-insights==1.17.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-contact-center-insights/CHANGELOG.md) -- [google-cloud-container==2.50.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-container/CHANGELOG.md) +- [google-cloud-container==2.51.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-container/CHANGELOG.md) - 
[google-cloud-containeranalysis==2.14.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-containeranalysis/CHANGELOG.md) - [google-cloud-contentwarehouse==0.7.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-contentwarehouse/CHANGELOG.md) - [google-cloud-data-fusion==1.10.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-data-fusion/CHANGELOG.md) @@ -71,37 +72,37 @@ Changelogs - [google-cloud-datalabeling==1.10.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datalabeling/CHANGELOG.md) - [google-cloud-dataplex==2.2.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataplex/CHANGELOG.md) - [google-cloud-dataproc-metastore==1.15.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataproc-metastore/CHANGELOG.md) -- [google-cloud-dataproc==5.10.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataproc/CHANGELOG.md) +- [google-cloud-dataproc==5.13.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataproc/CHANGELOG.md) - [google-cloud-datastream==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datastream/CHANGELOG.md) - [google-cloud-deploy==2.0.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-deploy/CHANGELOG.md) - [google-cloud-developerconnect==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-developerconnect/CHANGELOG.md) - [google-cloud-dialogflow-cx==1.35.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dialogflow-cx/CHANGELOG.md) -- [google-cloud-dialogflow==2.31.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dialogflow/CHANGELOG.md) -- 
[google-cloud-discoveryengine==0.12.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-discoveryengine/CHANGELOG.md) -- [google-cloud-dlp==3.22.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dlp/CHANGELOG.md) +- [google-cloud-dialogflow==2.32.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dialogflow/CHANGELOG.md) +- [google-cloud-discoveryengine==0.12.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-discoveryengine/CHANGELOG.md) +- [google-cloud-dlp==3.23.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dlp/CHANGELOG.md) - [google-cloud-dms==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dms/CHANGELOG.md) -- [google-cloud-documentai==2.31.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-documentai/CHANGELOG.md) +- [google-cloud-documentai==2.32.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-documentai/CHANGELOG.md) - [google-cloud-domains==1.7.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-domains/CHANGELOG.md) - [google-cloud-edgecontainer==0.5.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-edgecontainer/CHANGELOG.md) -- [google-cloud-edgenetwork==0.1.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-edgenetwork/CHANGELOG.md) +- [google-cloud-edgenetwork==0.1.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-edgenetwork/CHANGELOG.md) - [google-cloud-enterpriseknowledgegraph==0.3.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-enterpriseknowledgegraph/CHANGELOG.md) - 
[google-cloud-essential-contacts==1.7.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-essential-contacts/CHANGELOG.md) - [google-cloud-eventarc-publishing==0.6.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-eventarc-publishing/CHANGELOG.md) - [google-cloud-eventarc==1.11.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-eventarc/CHANGELOG.md) - [google-cloud-filestore==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-filestore/CHANGELOG.md) - [google-cloud-functions==1.17.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-functions/CHANGELOG.md) -- [google-cloud-gdchardwaremanagement==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gdchardwaremanagement/CHANGELOG.md) +- [google-cloud-gdchardwaremanagement==0.1.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gdchardwaremanagement/CHANGELOG.md) - [google-cloud-gke-backup==0.5.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-backup/CHANGELOG.md) - [google-cloud-gke-connect-gateway==0.9.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-connect-gateway/CHANGELOG.md) - [google-cloud-gke-hub==1.14.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-hub/CHANGELOG.md) -- [google-cloud-gke-multicloud==0.6.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-multicloud/CHANGELOG.md) +- [google-cloud-gke-multicloud==0.6.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-multicloud/CHANGELOG.md) - [google-cloud-gsuiteaddons==0.3.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gsuiteaddons/CHANGELOG.md) - 
[google-cloud-iam-logging==1.3.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-iam-logging/CHANGELOG.md) - [google-cloud-iam==2.15.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-iam/CHANGELOG.md) - [google-cloud-iap==1.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-iap/CHANGELOG.md) - [google-cloud-ids==1.7.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-ids/CHANGELOG.md) - [google-cloud-kms-inventory==0.2.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-kms-inventory/CHANGELOG.md) -- [google-cloud-kms==2.24.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-kms/CHANGELOG.md) +- [google-cloud-kms==3.0.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-kms/CHANGELOG.md) - [google-cloud-language==2.14.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-language/CHANGELOG.md) - [google-cloud-life-sciences==0.9.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-life-sciences/CHANGELOG.md) - [google-cloud-managed-identities==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-managed-identities/CHANGELOG.md) @@ -112,14 +113,15 @@ Changelogs - [google-cloud-monitoring-dashboards==2.15.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-monitoring-dashboards/CHANGELOG.md) - [google-cloud-monitoring-metrics-scopes==1.6.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-monitoring-metrics-scopes/CHANGELOG.md) - [google-cloud-monitoring==2.22.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-monitoring/CHANGELOG.md) -- 
[google-cloud-netapp==0.3.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-netapp/CHANGELOG.md) +- [google-cloud-netapp==0.3.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-netapp/CHANGELOG.md) - [google-cloud-network-connectivity==2.4.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-connectivity/CHANGELOG.md) - [google-cloud-network-management==1.18.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-management/CHANGELOG.md) - [google-cloud-network-security==0.9.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-security/CHANGELOG.md) - [google-cloud-network-services==0.5.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-services/CHANGELOG.md) - [google-cloud-notebooks==1.10.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-notebooks/CHANGELOG.md) - [google-cloud-optimization==1.8.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-optimization/CHANGELOG.md) -- [google-cloud-orchestration-airflow==1.13.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-orchestration-airflow/CHANGELOG.md) +- [google-cloud-oracledatabase==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-oracledatabase/CHANGELOG.md) +- [google-cloud-orchestration-airflow==1.14.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-orchestration-airflow/CHANGELOG.md) - [google-cloud-os-config==1.17.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-os-config/CHANGELOG.md) - [google-cloud-os-login==2.14.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-os-login/CHANGELOG.md) - 
[google-cloud-parallelstore==0.2.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-parallelstore/CHANGELOG.md) @@ -132,7 +134,7 @@ Changelogs - [google-cloud-privilegedaccessmanager==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-privilegedaccessmanager/CHANGELOG.md) - [google-cloud-public-ca==0.3.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-public-ca/CHANGELOG.md) - [google-cloud-rapidmigrationassessment==0.1.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-rapidmigrationassessment/CHANGELOG.md) -- [google-cloud-recaptcha-enterprise==1.22.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recaptcha-enterprise/CHANGELOG.md) +- [google-cloud-recaptcha-enterprise==1.22.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recaptcha-enterprise/CHANGELOG.md) - [google-cloud-recommendations-ai==0.10.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recommendations-ai/CHANGELOG.md) - [google-cloud-recommender==2.15.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recommender/CHANGELOG.md) - [google-cloud-redis-cluster==0.1.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-redis-cluster/CHANGELOG.md) @@ -148,7 +150,7 @@ Changelogs - [google-cloud-securitycentermanagement==0.1.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-securitycentermanagement/CHANGELOG.md) - [google-cloud-service-control==1.12.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-control/CHANGELOG.md) - [google-cloud-service-directory==1.11.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-directory/CHANGELOG.md) -- 
[google-cloud-service-management==1.8.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-management/CHANGELOG.md) +- [google-cloud-service-management==1.9.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-management/CHANGELOG.md) - [google-cloud-service-usage==1.10.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-usage/CHANGELOG.md) - [google-cloud-servicehealth==0.1.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-servicehealth/CHANGELOG.md) - [google-cloud-shell==1.9.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-shell/CHANGELOG.md) @@ -170,7 +172,7 @@ Changelogs - [google-cloud-video-transcoder==1.12.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-video-transcoder/CHANGELOG.md) - [google-cloud-videointelligence==2.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-videointelligence/CHANGELOG.md) - [google-cloud-vision==3.7.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vision/CHANGELOG.md) -- [google-cloud-visionai==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-visionai/CHANGELOG.md) +- [google-cloud-visionai==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-visionai/CHANGELOG.md) - [google-cloud-vm-migration==1.8.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vm-migration/CHANGELOG.md) - [google-cloud-vmwareengine==1.5.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vmwareengine/CHANGELOG.md) - [google-cloud-vpc-access==1.10.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vpc-access/CHANGELOG.md) @@ -180,11 +182,12 @@ Changelogs - 
[google-cloud-workstations==0.5.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-workstations/CHANGELOG.md) - [google-geo-type==0.3.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-geo-type/CHANGELOG.md) - [google-maps-addressvalidation==0.3.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-addressvalidation/CHANGELOG.md) -- [google-maps-fleetengine-delivery==0.2.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-fleetengine-delivery/CHANGELOG.md) +- [google-maps-areainsights==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-areainsights/CHANGELOG.md) +- [google-maps-fleetengine-delivery==0.2.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-fleetengine-delivery/CHANGELOG.md) - [google-maps-fleetengine==0.2.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-fleetengine/CHANGELOG.md) - [google-maps-mapsplatformdatasets==0.4.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-mapsplatformdatasets/CHANGELOG.md) -- [google-maps-places==0.1.17](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-places/CHANGELOG.md) -- [google-maps-routeoptimization==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-routeoptimization/CHANGELOG.md) +- [google-maps-places==0.1.18](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-places/CHANGELOG.md) +- [google-maps-routeoptimization==0.1.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-routeoptimization/CHANGELOG.md) - [google-maps-routing==0.6.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-routing/CHANGELOG.md) - 
[google-maps-solar==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-solar/CHANGELOG.md) - [google-shopping-css==0.1.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-css/CHANGELOG.md) diff --git a/packages/google-ads-admanager/CHANGELOG.md b/packages/google-ads-admanager/CHANGELOG.md index 2a855789274e..3a1de1e5b43f 100644 --- a/packages/google-ads-admanager/CHANGELOG.md +++ b/packages/google-ads-admanager/CHANGELOG.md @@ -1,5 +1,21 @@ # Changelog +## [0.2.0](https://github.com/googleapis/google-cloud-python/compare/google-ads-admanager-v0.1.2...google-ads-admanager-v0.2.0) (2024-10-08) + + +### ⚠ BREAKING CHANGES + +* Removed closed beta services that had data discrepancies with the SOAP API + +### Features + +* Added support for Interactive Reporting ([6db79dc](https://github.com/googleapis/google-cloud-python/commit/6db79dc964b540f1c9c21d96122e4916aca66d98)) + + +### Bug Fixes + +* Removed closed beta services that had data discrepancies with the SOAP API ([6db79dc](https://github.com/googleapis/google-cloud-python/commit/6db79dc964b540f1c9c21d96122e4916aca66d98)) + ## [0.1.2](https://github.com/googleapis/google-cloud-python/compare/google-ads-admanager-v0.1.1...google-ads-admanager-v0.1.2) (2024-07-30) diff --git a/packages/google-ads-admanager/docs/admanager_v1/ad_partner_service.rst b/packages/google-ads-admanager/docs/admanager_v1/ad_partner_service.rst deleted file mode 100644 index 7ccc095d3628..000000000000 --- a/packages/google-ads-admanager/docs/admanager_v1/ad_partner_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -AdPartnerService ----------------------------------- - -.. automodule:: google.ads.admanager_v1.services.ad_partner_service - :members: - :inherited-members: - -.. 
automodule:: google.ads.admanager_v1.services.ad_partner_service.pagers - :members: - :inherited-members: diff --git a/packages/google-ads-admanager/docs/admanager_v1/contact_service.rst b/packages/google-ads-admanager/docs/admanager_v1/contact_service.rst deleted file mode 100644 index 478ccc08a803..000000000000 --- a/packages/google-ads-admanager/docs/admanager_v1/contact_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -ContactService --------------------------------- - -.. automodule:: google.ads.admanager_v1.services.contact_service - :members: - :inherited-members: - -.. automodule:: google.ads.admanager_v1.services.contact_service.pagers - :members: - :inherited-members: diff --git a/packages/google-ads-admanager/docs/admanager_v1/creative_service.rst b/packages/google-ads-admanager/docs/admanager_v1/creative_service.rst deleted file mode 100644 index 2f4e457a7ab3..000000000000 --- a/packages/google-ads-admanager/docs/admanager_v1/creative_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -CreativeService ---------------------------------- - -.. automodule:: google.ads.admanager_v1.services.creative_service - :members: - :inherited-members: - -.. automodule:: google.ads.admanager_v1.services.creative_service.pagers - :members: - :inherited-members: diff --git a/packages/google-ads-admanager/docs/admanager_v1/entity_signals_mapping_service.rst b/packages/google-ads-admanager/docs/admanager_v1/entity_signals_mapping_service.rst new file mode 100644 index 000000000000..d4e1f7fa5634 --- /dev/null +++ b/packages/google-ads-admanager/docs/admanager_v1/entity_signals_mapping_service.rst @@ -0,0 +1,10 @@ +EntitySignalsMappingService +--------------------------------------------- + +.. automodule:: google.ads.admanager_v1.services.entity_signals_mapping_service + :members: + :inherited-members: + +.. 
automodule:: google.ads.admanager_v1.services.entity_signals_mapping_service.pagers + :members: + :inherited-members: diff --git a/packages/google-ads-admanager/docs/admanager_v1/label_service.rst b/packages/google-ads-admanager/docs/admanager_v1/label_service.rst deleted file mode 100644 index f3408d1767f5..000000000000 --- a/packages/google-ads-admanager/docs/admanager_v1/label_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -LabelService ------------------------------- - -.. automodule:: google.ads.admanager_v1.services.label_service - :members: - :inherited-members: - -.. automodule:: google.ads.admanager_v1.services.label_service.pagers - :members: - :inherited-members: diff --git a/packages/google-ads-admanager/docs/admanager_v1/line_item_service.rst b/packages/google-ads-admanager/docs/admanager_v1/line_item_service.rst deleted file mode 100644 index 6b4388d90085..000000000000 --- a/packages/google-ads-admanager/docs/admanager_v1/line_item_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -LineItemService ---------------------------------- - -.. automodule:: google.ads.admanager_v1.services.line_item_service - :members: - :inherited-members: - -.. automodule:: google.ads.admanager_v1.services.line_item_service.pagers - :members: - :inherited-members: diff --git a/packages/google-ads-admanager/docs/admanager_v1/report_service.rst b/packages/google-ads-admanager/docs/admanager_v1/report_service.rst index 96130cad2289..a655ad73d7a3 100644 --- a/packages/google-ads-admanager/docs/admanager_v1/report_service.rst +++ b/packages/google-ads-admanager/docs/admanager_v1/report_service.rst @@ -4,3 +4,7 @@ ReportService .. automodule:: google.ads.admanager_v1.services.report_service :members: :inherited-members: + +.. 
automodule:: google.ads.admanager_v1.services.report_service.pagers + :members: + :inherited-members: diff --git a/packages/google-ads-admanager/docs/admanager_v1/services_.rst b/packages/google-ads-admanager/docs/admanager_v1/services_.rst index a9b93b8a07c2..a1522b62dc40 100644 --- a/packages/google-ads-admanager/docs/admanager_v1/services_.rst +++ b/packages/google-ads-admanager/docs/admanager_v1/services_.rst @@ -3,20 +3,16 @@ Services for Google Ads Admanager v1 API .. toctree:: :maxdepth: 2 - ad_partner_service ad_unit_service company_service - contact_service - creative_service custom_field_service custom_targeting_key_service custom_targeting_value_service - label_service - line_item_service + entity_signals_mapping_service network_service order_service placement_service report_service role_service - team_service + taxonomy_category_service user_service diff --git a/packages/google-ads-admanager/docs/admanager_v1/taxonomy_category_service.rst b/packages/google-ads-admanager/docs/admanager_v1/taxonomy_category_service.rst new file mode 100644 index 000000000000..61f13e739e19 --- /dev/null +++ b/packages/google-ads-admanager/docs/admanager_v1/taxonomy_category_service.rst @@ -0,0 +1,10 @@ +TaxonomyCategoryService +----------------------------------------- + +.. automodule:: google.ads.admanager_v1.services.taxonomy_category_service + :members: + :inherited-members: + +.. automodule:: google.ads.admanager_v1.services.taxonomy_category_service.pagers + :members: + :inherited-members: diff --git a/packages/google-ads-admanager/docs/admanager_v1/team_service.rst b/packages/google-ads-admanager/docs/admanager_v1/team_service.rst deleted file mode 100644 index 4d3e14c6f6c1..000000000000 --- a/packages/google-ads-admanager/docs/admanager_v1/team_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -TeamService ------------------------------ - -.. automodule:: google.ads.admanager_v1.services.team_service - :members: - :inherited-members: - -.. 
automodule:: google.ads.admanager_v1.services.team_service.pagers - :members: - :inherited-members: diff --git a/packages/google-ads-admanager/docs/admanager_v1/user_service.rst b/packages/google-ads-admanager/docs/admanager_v1/user_service.rst index 9bae86979749..c7be2db4394e 100644 --- a/packages/google-ads-admanager/docs/admanager_v1/user_service.rst +++ b/packages/google-ads-admanager/docs/admanager_v1/user_service.rst @@ -4,7 +4,3 @@ UserService .. automodule:: google.ads.admanager_v1.services.user_service :members: :inherited-members: - -.. automodule:: google.ads.admanager_v1.services.user_service.pagers - :members: - :inherited-members: diff --git a/packages/google-ads-admanager/google/ads/admanager/__init__.py b/packages/google-ads-admanager/google/ads/admanager/__init__.py index 39067144e427..9672929d7e9f 100644 --- a/packages/google-ads-admanager/google/ads/admanager/__init__.py +++ b/packages/google-ads-admanager/google/ads/admanager/__init__.py @@ -18,15 +18,8 @@ __version__ = package_version.__version__ -from google.ads.admanager_v1.services.ad_partner_service.client import ( - AdPartnerServiceClient, -) from google.ads.admanager_v1.services.ad_unit_service.client import AdUnitServiceClient from google.ads.admanager_v1.services.company_service.client import CompanyServiceClient -from google.ads.admanager_v1.services.contact_service.client import ContactServiceClient -from google.ads.admanager_v1.services.creative_service.client import ( - CreativeServiceClient, -) from google.ads.admanager_v1.services.custom_field_service.client import ( CustomFieldServiceClient, ) @@ -36,9 +29,8 @@ from google.ads.admanager_v1.services.custom_targeting_value_service.client import ( CustomTargetingValueServiceClient, ) -from google.ads.admanager_v1.services.label_service.client import LabelServiceClient -from google.ads.admanager_v1.services.line_item_service.client import ( - LineItemServiceClient, +from 
google.ads.admanager_v1.services.entity_signals_mapping_service.client import ( + EntitySignalsMappingServiceClient, ) from google.ads.admanager_v1.services.network_service.client import NetworkServiceClient from google.ads.admanager_v1.services.order_service.client import OrderServiceClient @@ -47,76 +39,66 @@ ) from google.ads.admanager_v1.services.report_service.client import ReportServiceClient from google.ads.admanager_v1.services.role_service.client import RoleServiceClient -from google.ads.admanager_v1.services.team_service.client import TeamServiceClient -from google.ads.admanager_v1.services.user_service.client import UserServiceClient -from google.ads.admanager_v1.types.ad_partner_declaration import ( - AdPartnerDeclaration, - DeclarationTypeEnum, +from google.ads.admanager_v1.services.taxonomy_category_service.client import ( + TaxonomyCategoryServiceClient, ) -from google.ads.admanager_v1.types.ad_partner_service import ( - AdPartner, - GetAdPartnerRequest, - ListAdPartnersRequest, - ListAdPartnersResponse, +from google.ads.admanager_v1.services.user_service.client import UserServiceClient +from google.ads.admanager_v1.types.ad_unit_enums import ( + AdUnitStatusEnum, + SmartSizeModeEnum, + TargetWindowEnum, ) -from google.ads.admanager_v1.types.ad_unit_enums import AppliedAdsenseEnabledEnum -from google.ads.admanager_v1.types.ad_unit_service import ( +from google.ads.admanager_v1.types.ad_unit_messages import ( AdUnit, AdUnitParent, - GetAdUnitRequest, + AdUnitSize, LabelFrequencyCap, +) +from google.ads.admanager_v1.types.ad_unit_service import ( + GetAdUnitRequest, + ListAdUnitSizesRequest, + ListAdUnitSizesResponse, ListAdUnitsRequest, ListAdUnitsResponse, - SmartSizeModeEnum, - TargetWindowEnum, ) -from google.ads.admanager_v1.types.ad_unit_size import AdUnitSize from google.ads.admanager_v1.types.admanager_error import AdManagerError from google.ads.admanager_v1.types.applied_label import AppliedLabel from 
google.ads.admanager_v1.types.company_credit_status_enum import ( CompanyCreditStatusEnum, ) +from google.ads.admanager_v1.types.company_messages import Company from google.ads.admanager_v1.types.company_service import ( - Company, GetCompanyRequest, ListCompaniesRequest, ListCompaniesResponse, ) from google.ads.admanager_v1.types.company_type_enum import CompanyTypeEnum -from google.ads.admanager_v1.types.computed_status_enum import ComputedStatusEnum -from google.ads.admanager_v1.types.contact_service import ( - Contact, - GetContactRequest, - ListContactsRequest, - ListContactsResponse, -) -from google.ads.admanager_v1.types.creative_placeholder import CreativePlaceholder -from google.ads.admanager_v1.types.creative_service import ( - Creative, - GetCreativeRequest, - ListCreativesRequest, - ListCreativesResponse, -) +from google.ads.admanager_v1.types.contact_messages import Contact from google.ads.admanager_v1.types.custom_field_enums import ( CustomFieldDataTypeEnum, CustomFieldEntityTypeEnum, CustomFieldStatusEnum, CustomFieldVisibilityEnum, ) -from google.ads.admanager_v1.types.custom_field_service import ( +from google.ads.admanager_v1.types.custom_field_messages import ( CustomField, CustomFieldOption, +) +from google.ads.admanager_v1.types.custom_field_service import ( GetCustomFieldRequest, ListCustomFieldsRequest, ListCustomFieldsResponse, ) +from google.ads.admanager_v1.types.custom_field_value import CustomFieldValue from google.ads.admanager_v1.types.custom_targeting_key_enums import ( CustomTargetingKeyReportableTypeEnum, CustomTargetingKeyStatusEnum, CustomTargetingKeyTypeEnum, ) -from google.ads.admanager_v1.types.custom_targeting_key_service import ( +from google.ads.admanager_v1.types.custom_targeting_key_messages import ( CustomTargetingKey, +) +from google.ads.admanager_v1.types.custom_targeting_key_service import ( GetCustomTargetingKeyRequest, ListCustomTargetingKeysRequest, ListCustomTargetingKeysResponse, @@ -125,109 +107,114 @@ 
CustomTargetingValueMatchTypeEnum, CustomTargetingValueStatusEnum, ) -from google.ads.admanager_v1.types.custom_targeting_value_service import ( +from google.ads.admanager_v1.types.custom_targeting_value_messages import ( CustomTargetingValue, +) +from google.ads.admanager_v1.types.custom_targeting_value_service import ( GetCustomTargetingValueRequest, ListCustomTargetingValuesRequest, ListCustomTargetingValuesResponse, ) -from google.ads.admanager_v1.types.environment_type_enum import EnvironmentTypeEnum -from google.ads.admanager_v1.types.frequency_cap import FrequencyCap, TimeUnitEnum -from google.ads.admanager_v1.types.goal import Goal, GoalTypeEnum, UnitTypeEnum -from google.ads.admanager_v1.types.label_service import ( - GetLabelRequest, - Label, - ListLabelsRequest, - ListLabelsResponse, +from google.ads.admanager_v1.types.entity_signals_mapping_messages import ( + EntitySignalsMapping, ) -from google.ads.admanager_v1.types.line_item_enums import ( - CreativeRotationTypeEnum, - DeliveryRateTypeEnum, - LineItemCostTypeEnum, - LineItemDiscountTypeEnum, - LineItemTypeEnum, - ReservationStatusEnum, +from google.ads.admanager_v1.types.entity_signals_mapping_service import ( + BatchCreateEntitySignalsMappingsRequest, + BatchCreateEntitySignalsMappingsResponse, + BatchUpdateEntitySignalsMappingsRequest, + BatchUpdateEntitySignalsMappingsResponse, + CreateEntitySignalsMappingRequest, + GetEntitySignalsMappingRequest, + ListEntitySignalsMappingsRequest, + ListEntitySignalsMappingsResponse, + UpdateEntitySignalsMappingRequest, ) -from google.ads.admanager_v1.types.line_item_service import ( - GetLineItemRequest, - LineItem, - ListLineItemsRequest, - ListLineItemsResponse, +from google.ads.admanager_v1.types.environment_type_enum import EnvironmentTypeEnum +from google.ads.admanager_v1.types.frequency_cap import FrequencyCap +from google.ads.admanager_v1.types.label_messages import Label +from google.ads.admanager_v1.types.network_messages import Network +from 
google.ads.admanager_v1.types.network_service import ( + GetNetworkRequest, + ListNetworksRequest, + ListNetworksResponse, ) -from google.ads.admanager_v1.types.network_service import GetNetworkRequest, Network +from google.ads.admanager_v1.types.order_enums import OrderStatusEnum +from google.ads.admanager_v1.types.order_messages import Order from google.ads.admanager_v1.types.order_service import ( GetOrderRequest, ListOrdersRequest, ListOrdersResponse, - Order, ) from google.ads.admanager_v1.types.placement_enums import PlacementStatusEnum +from google.ads.admanager_v1.types.placement_messages import Placement from google.ads.admanager_v1.types.placement_service import ( GetPlacementRequest, ListPlacementsRequest, ListPlacementsResponse, - Placement, ) from google.ads.admanager_v1.types.report_service import ( - ExportSavedReportMetadata, - ExportSavedReportRequest, - ExportSavedReportResponse, + CreateReportRequest, + FetchReportResultRowsRequest, + FetchReportResultRowsResponse, + GetReportRequest, + ListReportsRequest, + ListReportsResponse, Report, + ReportDefinition, + RunReportMetadata, + RunReportRequest, + RunReportResponse, + Schedule, + ScheduleOptions, + UpdateReportRequest, ) +from google.ads.admanager_v1.types.role_enums import RoleStatusEnum +from google.ads.admanager_v1.types.role_messages import Role from google.ads.admanager_v1.types.role_service import ( GetRoleRequest, ListRolesRequest, ListRolesResponse, - Role, -) -from google.ads.admanager_v1.types.size import Size, SizeTypeEnum -from google.ads.admanager_v1.types.team_service import ( - GetTeamRequest, - ListTeamsRequest, - ListTeamsResponse, - Team, ) -from google.ads.admanager_v1.types.user_service import ( - GetUserRequest, - ListUsersRequest, - ListUsersResponse, - User, +from google.ads.admanager_v1.types.size import Size +from google.ads.admanager_v1.types.size_type_enum import SizeTypeEnum +from google.ads.admanager_v1.types.taxonomy_category_messages import TaxonomyCategory +from 
google.ads.admanager_v1.types.taxonomy_category_service import ( + GetTaxonomyCategoryRequest, + ListTaxonomyCategoriesRequest, + ListTaxonomyCategoriesResponse, ) +from google.ads.admanager_v1.types.taxonomy_type_enum import TaxonomyTypeEnum +from google.ads.admanager_v1.types.team_messages import Team +from google.ads.admanager_v1.types.time_unit_enum import TimeUnitEnum +from google.ads.admanager_v1.types.user_messages import User +from google.ads.admanager_v1.types.user_service import GetUserRequest __all__ = ( - "AdPartnerServiceClient", "AdUnitServiceClient", "CompanyServiceClient", - "ContactServiceClient", - "CreativeServiceClient", "CustomFieldServiceClient", "CustomTargetingKeyServiceClient", "CustomTargetingValueServiceClient", - "LabelServiceClient", - "LineItemServiceClient", + "EntitySignalsMappingServiceClient", "NetworkServiceClient", "OrderServiceClient", "PlacementServiceClient", "ReportServiceClient", "RoleServiceClient", - "TeamServiceClient", + "TaxonomyCategoryServiceClient", "UserServiceClient", - "AdPartnerDeclaration", - "DeclarationTypeEnum", - "AdPartner", - "GetAdPartnerRequest", - "ListAdPartnersRequest", - "ListAdPartnersResponse", - "AppliedAdsenseEnabledEnum", + "AdUnitStatusEnum", + "SmartSizeModeEnum", + "TargetWindowEnum", "AdUnit", "AdUnitParent", - "GetAdUnitRequest", + "AdUnitSize", "LabelFrequencyCap", + "GetAdUnitRequest", + "ListAdUnitSizesRequest", + "ListAdUnitSizesResponse", "ListAdUnitsRequest", "ListAdUnitsResponse", - "SmartSizeModeEnum", - "TargetWindowEnum", - "AdUnitSize", "AdManagerError", "AppliedLabel", "CompanyCreditStatusEnum", @@ -236,16 +223,7 @@ "ListCompaniesRequest", "ListCompaniesResponse", "CompanyTypeEnum", - "ComputedStatusEnum", "Contact", - "GetContactRequest", - "ListContactsRequest", - "ListContactsResponse", - "CreativePlaceholder", - "Creative", - "GetCreativeRequest", - "ListCreativesRequest", - "ListCreativesResponse", "CustomFieldDataTypeEnum", "CustomFieldEntityTypeEnum", 
"CustomFieldStatusEnum", @@ -255,6 +233,7 @@ "GetCustomFieldRequest", "ListCustomFieldsRequest", "ListCustomFieldsResponse", + "CustomFieldValue", "CustomTargetingKeyReportableTypeEnum", "CustomTargetingKeyStatusEnum", "CustomTargetingKeyTypeEnum", @@ -268,53 +247,61 @@ "GetCustomTargetingValueRequest", "ListCustomTargetingValuesRequest", "ListCustomTargetingValuesResponse", + "EntitySignalsMapping", + "BatchCreateEntitySignalsMappingsRequest", + "BatchCreateEntitySignalsMappingsResponse", + "BatchUpdateEntitySignalsMappingsRequest", + "BatchUpdateEntitySignalsMappingsResponse", + "CreateEntitySignalsMappingRequest", + "GetEntitySignalsMappingRequest", + "ListEntitySignalsMappingsRequest", + "ListEntitySignalsMappingsResponse", + "UpdateEntitySignalsMappingRequest", "EnvironmentTypeEnum", "FrequencyCap", - "TimeUnitEnum", - "Goal", - "GoalTypeEnum", - "UnitTypeEnum", - "GetLabelRequest", "Label", - "ListLabelsRequest", - "ListLabelsResponse", - "CreativeRotationTypeEnum", - "DeliveryRateTypeEnum", - "LineItemCostTypeEnum", - "LineItemDiscountTypeEnum", - "LineItemTypeEnum", - "ReservationStatusEnum", - "GetLineItemRequest", - "LineItem", - "ListLineItemsRequest", - "ListLineItemsResponse", - "GetNetworkRequest", "Network", + "GetNetworkRequest", + "ListNetworksRequest", + "ListNetworksResponse", + "OrderStatusEnum", + "Order", "GetOrderRequest", "ListOrdersRequest", "ListOrdersResponse", - "Order", "PlacementStatusEnum", + "Placement", "GetPlacementRequest", "ListPlacementsRequest", "ListPlacementsResponse", - "Placement", - "ExportSavedReportMetadata", - "ExportSavedReportRequest", - "ExportSavedReportResponse", + "CreateReportRequest", + "FetchReportResultRowsRequest", + "FetchReportResultRowsResponse", + "GetReportRequest", + "ListReportsRequest", + "ListReportsResponse", "Report", + "ReportDefinition", + "RunReportMetadata", + "RunReportRequest", + "RunReportResponse", + "Schedule", + "ScheduleOptions", + "UpdateReportRequest", + "RoleStatusEnum", + "Role", 
"GetRoleRequest", "ListRolesRequest", "ListRolesResponse", - "Role", "Size", "SizeTypeEnum", - "GetTeamRequest", - "ListTeamsRequest", - "ListTeamsResponse", + "TaxonomyCategory", + "GetTaxonomyCategoryRequest", + "ListTaxonomyCategoriesRequest", + "ListTaxonomyCategoriesResponse", + "TaxonomyTypeEnum", "Team", - "GetUserRequest", - "ListUsersRequest", - "ListUsersResponse", + "TimeUnitEnum", "User", + "GetUserRequest", ) diff --git a/packages/google-ads-admanager/google/ads/admanager/gapic_version.py b/packages/google-ads-admanager/google/ads/admanager/gapic_version.py index 558c8aab67c5..364164ddb134 100644 --- a/packages/google-ads-admanager/google/ads/admanager/gapic_version.py +++ b/packages/google-ads-admanager/google/ads/admanager/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.2.0" # {x-release-please-version} diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/__init__.py b/packages/google-ads-admanager/google/ads/admanager_v1/__init__.py index b13eac5f2835..e2d73bf488ba 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/__init__.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/__init__.py @@ -18,86 +18,59 @@ __version__ = package_version.__version__ -from .services.ad_partner_service import AdPartnerServiceClient from .services.ad_unit_service import AdUnitServiceClient from .services.company_service import CompanyServiceClient -from .services.contact_service import ContactServiceClient -from .services.creative_service import CreativeServiceClient from .services.custom_field_service import CustomFieldServiceClient from .services.custom_targeting_key_service import CustomTargetingKeyServiceClient from .services.custom_targeting_value_service import CustomTargetingValueServiceClient -from .services.label_service import LabelServiceClient -from 
.services.line_item_service import LineItemServiceClient +from .services.entity_signals_mapping_service import EntitySignalsMappingServiceClient from .services.network_service import NetworkServiceClient from .services.order_service import OrderServiceClient from .services.placement_service import PlacementServiceClient from .services.report_service import ReportServiceClient from .services.role_service import RoleServiceClient -from .services.team_service import TeamServiceClient +from .services.taxonomy_category_service import TaxonomyCategoryServiceClient from .services.user_service import UserServiceClient -from .types.ad_partner_declaration import AdPartnerDeclaration, DeclarationTypeEnum -from .types.ad_partner_service import ( - AdPartner, - GetAdPartnerRequest, - ListAdPartnersRequest, - ListAdPartnersResponse, -) -from .types.ad_unit_enums import AppliedAdsenseEnabledEnum +from .types.ad_unit_enums import AdUnitStatusEnum, SmartSizeModeEnum, TargetWindowEnum +from .types.ad_unit_messages import AdUnit, AdUnitParent, AdUnitSize, LabelFrequencyCap from .types.ad_unit_service import ( - AdUnit, - AdUnitParent, GetAdUnitRequest, - LabelFrequencyCap, + ListAdUnitSizesRequest, + ListAdUnitSizesResponse, ListAdUnitsRequest, ListAdUnitsResponse, - SmartSizeModeEnum, - TargetWindowEnum, ) -from .types.ad_unit_size import AdUnitSize from .types.admanager_error import AdManagerError from .types.applied_label import AppliedLabel from .types.company_credit_status_enum import CompanyCreditStatusEnum +from .types.company_messages import Company from .types.company_service import ( - Company, GetCompanyRequest, ListCompaniesRequest, ListCompaniesResponse, ) from .types.company_type_enum import CompanyTypeEnum -from .types.computed_status_enum import ComputedStatusEnum -from .types.contact_service import ( - Contact, - GetContactRequest, - ListContactsRequest, - ListContactsResponse, -) -from .types.creative_placeholder import CreativePlaceholder -from 
.types.creative_service import ( - Creative, - GetCreativeRequest, - ListCreativesRequest, - ListCreativesResponse, -) +from .types.contact_messages import Contact from .types.custom_field_enums import ( CustomFieldDataTypeEnum, CustomFieldEntityTypeEnum, CustomFieldStatusEnum, CustomFieldVisibilityEnum, ) +from .types.custom_field_messages import CustomField, CustomFieldOption from .types.custom_field_service import ( - CustomField, - CustomFieldOption, GetCustomFieldRequest, ListCustomFieldsRequest, ListCustomFieldsResponse, ) +from .types.custom_field_value import CustomFieldValue from .types.custom_targeting_key_enums import ( CustomTargetingKeyReportableTypeEnum, CustomTargetingKeyStatusEnum, CustomTargetingKeyTypeEnum, ) +from .types.custom_targeting_key_messages import CustomTargetingKey from .types.custom_targeting_key_service import ( - CustomTargetingKey, GetCustomTargetingKeyRequest, ListCustomTargetingKeysRequest, ListCustomTargetingKeysResponse, @@ -106,103 +79,102 @@ CustomTargetingValueMatchTypeEnum, CustomTargetingValueStatusEnum, ) +from .types.custom_targeting_value_messages import CustomTargetingValue from .types.custom_targeting_value_service import ( - CustomTargetingValue, GetCustomTargetingValueRequest, ListCustomTargetingValuesRequest, ListCustomTargetingValuesResponse, ) -from .types.environment_type_enum import EnvironmentTypeEnum -from .types.frequency_cap import FrequencyCap, TimeUnitEnum -from .types.goal import Goal, GoalTypeEnum, UnitTypeEnum -from .types.label_service import ( - GetLabelRequest, - Label, - ListLabelsRequest, - ListLabelsResponse, -) -from .types.line_item_enums import ( - CreativeRotationTypeEnum, - DeliveryRateTypeEnum, - LineItemCostTypeEnum, - LineItemDiscountTypeEnum, - LineItemTypeEnum, - ReservationStatusEnum, +from .types.entity_signals_mapping_messages import EntitySignalsMapping +from .types.entity_signals_mapping_service import ( + BatchCreateEntitySignalsMappingsRequest, + 
BatchCreateEntitySignalsMappingsResponse, + BatchUpdateEntitySignalsMappingsRequest, + BatchUpdateEntitySignalsMappingsResponse, + CreateEntitySignalsMappingRequest, + GetEntitySignalsMappingRequest, + ListEntitySignalsMappingsRequest, + ListEntitySignalsMappingsResponse, + UpdateEntitySignalsMappingRequest, ) -from .types.line_item_service import ( - GetLineItemRequest, - LineItem, - ListLineItemsRequest, - ListLineItemsResponse, -) -from .types.network_service import GetNetworkRequest, Network -from .types.order_service import ( - GetOrderRequest, - ListOrdersRequest, - ListOrdersResponse, - Order, +from .types.environment_type_enum import EnvironmentTypeEnum +from .types.frequency_cap import FrequencyCap +from .types.label_messages import Label +from .types.network_messages import Network +from .types.network_service import ( + GetNetworkRequest, + ListNetworksRequest, + ListNetworksResponse, ) +from .types.order_enums import OrderStatusEnum +from .types.order_messages import Order +from .types.order_service import GetOrderRequest, ListOrdersRequest, ListOrdersResponse from .types.placement_enums import PlacementStatusEnum +from .types.placement_messages import Placement from .types.placement_service import ( GetPlacementRequest, ListPlacementsRequest, ListPlacementsResponse, - Placement, ) from .types.report_service import ( - ExportSavedReportMetadata, - ExportSavedReportRequest, - ExportSavedReportResponse, + CreateReportRequest, + FetchReportResultRowsRequest, + FetchReportResultRowsResponse, + GetReportRequest, + ListReportsRequest, + ListReportsResponse, Report, + ReportDefinition, + RunReportMetadata, + RunReportRequest, + RunReportResponse, + Schedule, + ScheduleOptions, + UpdateReportRequest, ) -from .types.role_service import ( - GetRoleRequest, - ListRolesRequest, - ListRolesResponse, - Role, -) -from .types.size import Size, SizeTypeEnum -from .types.team_service import ( - GetTeamRequest, - ListTeamsRequest, - ListTeamsResponse, - Team, -) -from 
.types.user_service import ( - GetUserRequest, - ListUsersRequest, - ListUsersResponse, - User, +from .types.role_enums import RoleStatusEnum +from .types.role_messages import Role +from .types.role_service import GetRoleRequest, ListRolesRequest, ListRolesResponse +from .types.size import Size +from .types.size_type_enum import SizeTypeEnum +from .types.taxonomy_category_messages import TaxonomyCategory +from .types.taxonomy_category_service import ( + GetTaxonomyCategoryRequest, + ListTaxonomyCategoriesRequest, + ListTaxonomyCategoriesResponse, ) +from .types.taxonomy_type_enum import TaxonomyTypeEnum +from .types.team_messages import Team +from .types.time_unit_enum import TimeUnitEnum +from .types.user_messages import User +from .types.user_service import GetUserRequest __all__ = ( "AdManagerError", - "AdPartner", - "AdPartnerDeclaration", - "AdPartnerServiceClient", "AdUnit", "AdUnitParent", "AdUnitServiceClient", "AdUnitSize", - "AppliedAdsenseEnabledEnum", + "AdUnitStatusEnum", "AppliedLabel", + "BatchCreateEntitySignalsMappingsRequest", + "BatchCreateEntitySignalsMappingsResponse", + "BatchUpdateEntitySignalsMappingsRequest", + "BatchUpdateEntitySignalsMappingsResponse", "Company", "CompanyCreditStatusEnum", "CompanyServiceClient", "CompanyTypeEnum", - "ComputedStatusEnum", "Contact", - "ContactServiceClient", - "Creative", - "CreativePlaceholder", - "CreativeRotationTypeEnum", - "CreativeServiceClient", + "CreateEntitySignalsMappingRequest", + "CreateReportRequest", "CustomField", "CustomFieldDataTypeEnum", "CustomFieldEntityTypeEnum", "CustomFieldOption", "CustomFieldServiceClient", "CustomFieldStatusEnum", + "CustomFieldValue", "CustomFieldVisibilityEnum", "CustomTargetingKey", "CustomTargetingKeyReportableTypeEnum", @@ -213,89 +185,83 @@ "CustomTargetingValueMatchTypeEnum", "CustomTargetingValueServiceClient", "CustomTargetingValueStatusEnum", - "DeclarationTypeEnum", - "DeliveryRateTypeEnum", + "EntitySignalsMapping", + 
"EntitySignalsMappingServiceClient", "EnvironmentTypeEnum", - "ExportSavedReportMetadata", - "ExportSavedReportRequest", - "ExportSavedReportResponse", + "FetchReportResultRowsRequest", + "FetchReportResultRowsResponse", "FrequencyCap", - "GetAdPartnerRequest", "GetAdUnitRequest", "GetCompanyRequest", - "GetContactRequest", - "GetCreativeRequest", "GetCustomFieldRequest", "GetCustomTargetingKeyRequest", "GetCustomTargetingValueRequest", - "GetLabelRequest", - "GetLineItemRequest", + "GetEntitySignalsMappingRequest", "GetNetworkRequest", "GetOrderRequest", "GetPlacementRequest", + "GetReportRequest", "GetRoleRequest", - "GetTeamRequest", + "GetTaxonomyCategoryRequest", "GetUserRequest", - "Goal", - "GoalTypeEnum", "Label", "LabelFrequencyCap", - "LabelServiceClient", - "LineItem", - "LineItemCostTypeEnum", - "LineItemDiscountTypeEnum", - "LineItemServiceClient", - "LineItemTypeEnum", - "ListAdPartnersRequest", - "ListAdPartnersResponse", + "ListAdUnitSizesRequest", + "ListAdUnitSizesResponse", "ListAdUnitsRequest", "ListAdUnitsResponse", "ListCompaniesRequest", "ListCompaniesResponse", - "ListContactsRequest", - "ListContactsResponse", - "ListCreativesRequest", - "ListCreativesResponse", "ListCustomFieldsRequest", "ListCustomFieldsResponse", "ListCustomTargetingKeysRequest", "ListCustomTargetingKeysResponse", "ListCustomTargetingValuesRequest", "ListCustomTargetingValuesResponse", - "ListLabelsRequest", - "ListLabelsResponse", - "ListLineItemsRequest", - "ListLineItemsResponse", + "ListEntitySignalsMappingsRequest", + "ListEntitySignalsMappingsResponse", + "ListNetworksRequest", + "ListNetworksResponse", "ListOrdersRequest", "ListOrdersResponse", "ListPlacementsRequest", "ListPlacementsResponse", + "ListReportsRequest", + "ListReportsResponse", "ListRolesRequest", "ListRolesResponse", - "ListTeamsRequest", - "ListTeamsResponse", - "ListUsersRequest", - "ListUsersResponse", + "ListTaxonomyCategoriesRequest", + "ListTaxonomyCategoriesResponse", "Network", 
"NetworkServiceClient", "Order", "OrderServiceClient", + "OrderStatusEnum", "Placement", "PlacementServiceClient", "PlacementStatusEnum", "Report", + "ReportDefinition", "ReportServiceClient", - "ReservationStatusEnum", "Role", "RoleServiceClient", + "RoleStatusEnum", + "RunReportMetadata", + "RunReportRequest", + "RunReportResponse", + "Schedule", + "ScheduleOptions", "Size", "SizeTypeEnum", "SmartSizeModeEnum", "TargetWindowEnum", + "TaxonomyCategory", + "TaxonomyCategoryServiceClient", + "TaxonomyTypeEnum", "Team", - "TeamServiceClient", "TimeUnitEnum", - "UnitTypeEnum", + "UpdateEntitySignalsMappingRequest", + "UpdateReportRequest", "User", "UserServiceClient", ) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/gapic_metadata.json b/packages/google-ads-admanager/google/ads/admanager_v1/gapic_metadata.json index 67680096a5d9..aa173a3cf11e 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/gapic_metadata.json +++ b/packages/google-ads-admanager/google/ads/admanager_v1/gapic_metadata.json @@ -5,25 +5,6 @@ "protoPackage": "google.ads.admanager.v1", "schema": "1.0", "services": { - "AdPartnerService": { - "clients": { - "rest": { - "libraryClient": "AdPartnerServiceClient", - "rpcs": { - "GetAdPartner": { - "methods": [ - "get_ad_partner" - ] - }, - "ListAdPartners": { - "methods": [ - "list_ad_partners" - ] - } - } - } - } - }, "AdUnitService": { "clients": { "rest": { @@ -34,6 +15,11 @@ "get_ad_unit" ] }, + "ListAdUnitSizes": { + "methods": [ + "list_ad_unit_sizes" + ] + }, "ListAdUnits": { "methods": [ "list_ad_units" @@ -62,44 +48,6 @@ } } }, - "ContactService": { - "clients": { - "rest": { - "libraryClient": "ContactServiceClient", - "rpcs": { - "GetContact": { - "methods": [ - "get_contact" - ] - }, - "ListContacts": { - "methods": [ - "list_contacts" - ] - } - } - } - } - }, - "CreativeService": { - "clients": { - "rest": { - "libraryClient": "CreativeServiceClient", - "rpcs": { - "GetCreative": { - "methods": [ - 
"get_creative" - ] - }, - "ListCreatives": { - "methods": [ - "list_creatives" - ] - } - } - } - } - }, "CustomFieldService": { "clients": { "rest": { @@ -157,38 +105,39 @@ } } }, - "LabelService": { + "EntitySignalsMappingService": { "clients": { "rest": { - "libraryClient": "LabelServiceClient", + "libraryClient": "EntitySignalsMappingServiceClient", "rpcs": { - "GetLabel": { + "BatchCreateEntitySignalsMappings": { "methods": [ - "get_label" + "batch_create_entity_signals_mappings" ] }, - "ListLabels": { + "BatchUpdateEntitySignalsMappings": { "methods": [ - "list_labels" + "batch_update_entity_signals_mappings" ] - } - } - } - } - }, - "LineItemService": { - "clients": { - "rest": { - "libraryClient": "LineItemServiceClient", - "rpcs": { - "GetLineItem": { + }, + "CreateEntitySignalsMapping": { + "methods": [ + "create_entity_signals_mapping" + ] + }, + "GetEntitySignalsMapping": { "methods": [ - "get_line_item" + "get_entity_signals_mapping" ] }, - "ListLineItems": { + "ListEntitySignalsMappings": { "methods": [ - "list_line_items" + "list_entity_signals_mappings" + ] + }, + "UpdateEntitySignalsMapping": { + "methods": [ + "update_entity_signals_mapping" ] } } @@ -204,6 +153,11 @@ "methods": [ "get_network" ] + }, + "ListNetworks": { + "methods": [ + "list_networks" + ] } } } @@ -252,9 +206,34 @@ "rest": { "libraryClient": "ReportServiceClient", "rpcs": { - "ExportSavedReport": { + "CreateReport": { + "methods": [ + "create_report" + ] + }, + "FetchReportResultRows": { + "methods": [ + "fetch_report_result_rows" + ] + }, + "GetReport": { + "methods": [ + "get_report" + ] + }, + "ListReports": { "methods": [ - "export_saved_report" + "list_reports" + ] + }, + "RunReport": { + "methods": [ + "run_report" + ] + }, + "UpdateReport": { + "methods": [ + "update_report" ] } } @@ -280,19 +259,19 @@ } } }, - "TeamService": { + "TaxonomyCategoryService": { "clients": { "rest": { - "libraryClient": "TeamServiceClient", + "libraryClient": "TaxonomyCategoryServiceClient", 
"rpcs": { - "GetTeam": { + "GetTaxonomyCategory": { "methods": [ - "get_team" + "get_taxonomy_category" ] }, - "ListTeams": { + "ListTaxonomyCategories": { "methods": [ - "list_teams" + "list_taxonomy_categories" ] } } @@ -308,11 +287,6 @@ "methods": [ "get_user" ] - }, - "ListUsers": { - "methods": [ - "list_users" - ] } } } diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/gapic_version.py b/packages/google-ads-admanager/google/ads/admanager_v1/gapic_version.py index 558c8aab67c5..364164ddb134 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/gapic_version.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.2.0" # {x-release-please-version} diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/client.py index f1200c52e05a..c8c6cb4564df 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/client.py @@ -55,8 +55,8 @@ from google.ads.admanager_v1.services.ad_unit_service import pagers from google.ads.admanager_v1.types import ( ad_unit_enums, + ad_unit_messages, ad_unit_service, - ad_unit_size, applied_label, ) @@ -732,7 +732,7 @@ def get_ad_unit( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> ad_unit_service.AdUnit: + ) -> ad_unit_messages.AdUnit: r"""API to retrieve an AdUnit object. .. code-block:: python @@ -942,6 +942,124 @@ def sample_list_ad_units(): # Done; return the response. 
return response + def list_ad_unit_sizes( + self, + request: Optional[Union[ad_unit_service.ListAdUnitSizesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAdUnitSizesPager: + r"""API to retrieve a list of AdUnitSize objects. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import admanager_v1 + + def sample_list_ad_unit_sizes(): + # Create a client + client = admanager_v1.AdUnitServiceClient() + + # Initialize request argument(s) + request = admanager_v1.ListAdUnitSizesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_ad_unit_sizes(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.ads.admanager_v1.types.ListAdUnitSizesRequest, dict]): + The request object. Request object for ListAdUnitSizes + method. + parent (str): + Required. The parent, which owns this collection of + AdUnitSizes. Format: ``networks/{network_code}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.ads.admanager_v1.services.ad_unit_service.pagers.ListAdUnitSizesPager: + Response object for + ListAdUnitSizesRequest containing + matching AdUnitSizes. Iterating over + this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, ad_unit_service.ListAdUnitSizesRequest): + request = ad_unit_service.ListAdUnitSizesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_ad_unit_sizes] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAdUnitSizesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def __enter__(self) -> "AdUnitServiceClient": return self diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/pagers.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/pagers.py index 2ae957b1dea0..ebf38bec7995 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/pagers.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/pagers.py @@ -38,7 +38,7 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.ads.admanager_v1.types import ad_unit_service +from google.ads.admanager_v1.types import ad_unit_messages, ad_unit_service class ListAdUnitsPager: @@ -107,9 +107,83 @@ def pages(self) -> Iterator[ad_unit_service.ListAdUnitsResponse]: ) yield self._response - def __iter__(self) -> Iterator[ad_unit_service.AdUnit]: + def __iter__(self) -> Iterator[ad_unit_messages.AdUnit]: for page in self.pages: yield from page.ad_units def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListAdUnitSizesPager: + """A pager for iterating through ``list_ad_unit_sizes`` requests. + + This class thinly wraps an initial + :class:`google.ads.admanager_v1.types.ListAdUnitSizesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``ad_unit_sizes`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListAdUnitSizes`` requests and continue to iterate + through the ``ad_unit_sizes`` field on the + corresponding responses. + + All the usual :class:`google.ads.admanager_v1.types.ListAdUnitSizesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., ad_unit_service.ListAdUnitSizesResponse], + request: ad_unit_service.ListAdUnitSizesRequest, + response: ad_unit_service.ListAdUnitSizesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ads.admanager_v1.types.ListAdUnitSizesRequest): + The initial request object. + response (google.ads.admanager_v1.types.ListAdUnitSizesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = ad_unit_service.ListAdUnitSizesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[ad_unit_service.ListAdUnitSizesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[ad_unit_messages.AdUnitSize]: + for page in self.pages: + yield from page.ad_unit_sizes + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/transports/base.py 
b/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/transports/base.py index 948cad87abb1..7852b164a55c 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/transports/base.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/transports/base.py @@ -26,7 +26,7 @@ from google.oauth2 import service_account # type: ignore from google.ads.admanager_v1 import gapic_version as package_version -from google.ads.admanager_v1.types import ad_unit_service +from google.ads.admanager_v1.types import ad_unit_messages, ad_unit_service DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -139,6 +139,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.list_ad_unit_sizes: gapic_v1.method.wrap_method( + self.list_ad_unit_sizes, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -155,7 +160,7 @@ def get_ad_unit( self, ) -> Callable[ [ad_unit_service.GetAdUnitRequest], - Union[ad_unit_service.AdUnit, Awaitable[ad_unit_service.AdUnit]], + Union[ad_unit_messages.AdUnit, Awaitable[ad_unit_messages.AdUnit]], ]: raise NotImplementedError() @@ -171,6 +176,18 @@ def list_ad_units( ]: raise NotImplementedError() + @property + def list_ad_unit_sizes( + self, + ) -> Callable[ + [ad_unit_service.ListAdUnitSizesRequest], + Union[ + ad_unit_service.ListAdUnitSizesResponse, + Awaitable[ad_unit_service.ListAdUnitSizesResponse], + ], + ]: + raise NotImplementedError() + @property def get_operation( self, diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/transports/rest.py index c6dd9d86e533..2c1ecebf5b66 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/transports/rest.py +++ 
b/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/transports/rest.py @@ -38,7 +38,7 @@ from google.longrunning import operations_pb2 # type: ignore -from google.ads.admanager_v1.types import ad_unit_service +from google.ads.admanager_v1.types import ad_unit_messages, ad_unit_service from .base import AdUnitServiceTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO @@ -81,6 +81,14 @@ def post_list_ad_units(self, response): logging.log(f"Received response: {response}") return response + def pre_list_ad_unit_sizes(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_ad_unit_sizes(self, response): + logging.log(f"Received response: {response}") + return response + transport = AdUnitServiceRestTransport(interceptor=MyCustomAdUnitServiceInterceptor()) client = AdUnitServiceClient(transport=transport) @@ -100,8 +108,8 @@ def pre_get_ad_unit( return request, metadata def post_get_ad_unit( - self, response: ad_unit_service.AdUnit - ) -> ad_unit_service.AdUnit: + self, response: ad_unit_messages.AdUnit + ) -> ad_unit_messages.AdUnit: """Post-rpc interceptor for get_ad_unit Override in a subclass to manipulate the response @@ -133,6 +141,29 @@ def post_list_ad_units( """ return response + def pre_list_ad_unit_sizes( + self, + request: ad_unit_service.ListAdUnitSizesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ad_unit_service.ListAdUnitSizesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_ad_unit_sizes + + Override in a subclass to manipulate the request or metadata + before they are sent to the AdUnitService server. 
+ """ + return request, metadata + + def post_list_ad_unit_sizes( + self, response: ad_unit_service.ListAdUnitSizesResponse + ) -> ad_unit_service.ListAdUnitSizesResponse: + """Post-rpc interceptor for list_ad_unit_sizes + + Override in a subclass to manipulate the response + after it is returned by the AdUnitService server but before + it is returned to user code. + """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -274,7 +305,7 @@ def __call__( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> ad_unit_service.AdUnit: + ) -> ad_unit_messages.AdUnit: r"""Call the get ad unit method over HTTP. Args: @@ -287,7 +318,7 @@ def __call__( sent along with the request as metadata. Returns: - ~.ad_unit_service.AdUnit: + ~.ad_unit_messages.AdUnit: The AdUnit resource. """ @@ -331,8 +362,8 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = ad_unit_service.AdUnit() - pb_resp = ad_unit_service.AdUnit.pb(resp) + resp = ad_unit_messages.AdUnit() + pb_resp = ad_unit_messages.AdUnit.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_ad_unit(resp) @@ -427,10 +458,101 @@ def __call__( resp = self._interceptor.post_list_ad_units(resp) return resp + class _ListAdUnitSizes(AdUnitServiceRestStub): + def __hash__(self): + return hash("ListAdUnitSizes") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: ad_unit_service.ListAdUnitSizesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> ad_unit_service.ListAdUnitSizesResponse: + r"""Call 
the list ad unit sizes method over HTTP. + + Args: + request (~.ad_unit_service.ListAdUnitSizesRequest): + The request object. Request object for ListAdUnitSizes + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.ad_unit_service.ListAdUnitSizesResponse: + Response object for + ListAdUnitSizesRequest containing + matching AdUnitSizes. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=networks/*}/adUnitSizes", + }, + ] + request, metadata = self._interceptor.pre_list_ad_unit_sizes( + request, metadata + ) + pb_request = ad_unit_service.ListAdUnitSizesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = ad_unit_service.ListAdUnitSizesResponse() + pb_resp = ad_unit_service.ListAdUnitSizesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_ad_unit_sizes(resp) + return resp + @property def get_ad_unit( self, - ) -> Callable[[ad_unit_service.GetAdUnitRequest], ad_unit_service.AdUnit]: + ) -> Callable[[ad_unit_service.GetAdUnitRequest], ad_unit_messages.AdUnit]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast return self._GetAdUnit(self._session, self._host, self._interceptor) # type: ignore @@ -445,6 +567,17 @@ def list_ad_units( # In C++ this would require a dynamic_cast return self._ListAdUnits(self._session, self._host, self._interceptor) # type: ignore + @property + def list_ad_unit_sizes( + self, + ) -> Callable[ + [ad_unit_service.ListAdUnitSizesRequest], + ad_unit_service.ListAdUnitSizesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListAdUnitSizes(self._session, self._host, self._interceptor) # type: ignore + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore @@ -476,11 +609,11 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/exports/*}", + "uri": "/v1/{name=networks/*/operations/reports/runs/*}", }, { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/runs/*}", + "uri": "/v1/{name=networks/*/operations/reports/exports/*}", }, ] diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/client.py index a1bc3d1c6eab..85b49f138017 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/client.py @@ -49,11 +49,13 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore from google.ads.admanager_v1.services.company_service import pagers from google.ads.admanager_v1.types import ( applied_label, company_credit_status_enum, + company_messages, company_service, company_type_enum, ) @@ -753,7 +755,7 @@ def get_company( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> company_service.Company: + ) -> company_messages.Company: r"""API to retrieve a ``Company`` object. .. code-block:: python @@ -902,7 +904,7 @@ def sample_list_companies(): Returns: google.ads.admanager_v1.services.company_service.pagers.ListCompaniesPager: Response object for ListCompaniesRequest containing matching Company - resources. 
+ objects. Iterating over this object will yield results and resolve additional pages automatically. diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/pagers.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/pagers.py index 8dd003e78650..7a1c65b16259 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/pagers.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/pagers.py @@ -38,7 +38,7 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.ads.admanager_v1.types import company_service +from google.ads.admanager_v1.types import company_messages, company_service class ListCompaniesPager: @@ -107,7 +107,7 @@ def pages(self) -> Iterator[company_service.ListCompaniesResponse]: ) yield self._response - def __iter__(self) -> Iterator[company_service.Company]: + def __iter__(self) -> Iterator[company_messages.Company]: for page in self.pages: yield from page.companies diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/transports/base.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/transports/base.py index 0415f3d70be7..3304a05b29c2 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/transports/base.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/transports/base.py @@ -26,7 +26,7 @@ from google.oauth2 import service_account # type: ignore from google.ads.admanager_v1 import gapic_version as package_version -from google.ads.admanager_v1.types import company_service +from google.ads.admanager_v1.types import company_messages, company_service DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -155,7 +155,7 @@ def 
get_company( self, ) -> Callable[ [company_service.GetCompanyRequest], - Union[company_service.Company, Awaitable[company_service.Company]], + Union[company_messages.Company, Awaitable[company_messages.Company]], ]: raise NotImplementedError() diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/transports/rest.py index 3692c88b6fde..604ec04faf5a 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/transports/rest.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/transports/rest.py @@ -38,7 +38,7 @@ from google.longrunning import operations_pb2 # type: ignore -from google.ads.admanager_v1.types import company_service +from google.ads.admanager_v1.types import company_messages, company_service from .base import CompanyServiceTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO @@ -100,8 +100,8 @@ def pre_get_company( return request, metadata def post_get_company( - self, response: company_service.Company - ) -> company_service.Company: + self, response: company_messages.Company + ) -> company_messages.Company: """Post-rpc interceptor for get_company Override in a subclass to manipulate the response @@ -274,7 +274,7 @@ def __call__( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> company_service.Company: + ) -> company_messages.Company: r"""Call the get company method over HTTP. Args: @@ -287,7 +287,7 @@ def __call__( sent along with the request as metadata. Returns: - ~.company_service.Company: + ~.company_messages.Company: The ``Company`` resource. 
""" @@ -331,8 +331,8 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = company_service.Company() - pb_resp = company_service.Company.pb(resp) + resp = company_messages.Company() + pb_resp = company_messages.Company.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_company(resp) @@ -374,7 +374,7 @@ def __call__( Returns: ~.company_service.ListCompaniesResponse: Response object for ``ListCompaniesRequest`` containing - matching ``Company`` resources. + matching ``Company`` objects. """ @@ -428,7 +428,7 @@ def __call__( @property def get_company( self, - ) -> Callable[[company_service.GetCompanyRequest], company_service.Company]: + ) -> Callable[[company_service.GetCompanyRequest], company_messages.Company]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast return self._GetCompany(self._session, self._host, self._interceptor) # type: ignore @@ -474,11 +474,11 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/exports/*}", + "uri": "/v1/{name=networks/*/operations/reports/runs/*}", }, { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/runs/*}", + "uri": "/v1/{name=networks/*/operations/reports/exports/*}", }, ] diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/pagers.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/pagers.py deleted file mode 100644 index 30f2279d1f01..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/pagers.py +++ /dev/null @@ -1,115 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import ( - Any, - AsyncIterator, - Awaitable, - Callable, - Iterator, - Optional, - Sequence, - Tuple, - Union, -) - -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[ - retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None - ] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.ads.admanager_v1.types import contact_service - - -class ListContactsPager: - """A pager for iterating through ``list_contacts`` requests. - - This class thinly wraps an initial - :class:`google.ads.admanager_v1.types.ListContactsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``contacts`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListContacts`` requests and continue to iterate - through the ``contacts`` field on the - corresponding responses. - - All the usual :class:`google.ads.admanager_v1.types.ListContactsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - - def __init__( - self, - method: Callable[..., contact_service.ListContactsResponse], - request: contact_service.ListContactsRequest, - response: contact_service.ListContactsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.ads.admanager_v1.types.ListContactsRequest): - The initial request object. - response (google.ads.admanager_v1.types.ListContactsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = contact_service.ListContactsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[contact_service.ListContactsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __iter__(self) -> Iterator[contact_service.Contact]: - for page in self.pages: - yield from page.contacts - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/transports/rest.py 
deleted file mode 100644 index 52a4f962b295..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/contact_service/transports/rest.py +++ /dev/null @@ -1,526 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -import dataclasses -import json # type: ignore -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - -from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.protobuf import json_format -import grpc # type: ignore -from requests import __version__ as requests_version - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - - -from google.longrunning import operations_pb2 # type: ignore - -from google.ads.admanager_v1.types import contact_service - -from .base import ContactServiceTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - 
gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=requests_version, -) - - -class ContactServiceRestInterceptor: - """Interceptor for ContactService. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the ContactServiceRestTransport. - - .. code-block:: python - class MyCustomContactServiceInterceptor(ContactServiceRestInterceptor): - def pre_get_contact(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_contact(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_contacts(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_contacts(self, response): - logging.log(f"Received response: {response}") - return response - - transport = ContactServiceRestTransport(interceptor=MyCustomContactServiceInterceptor()) - client = ContactServiceClient(transport=transport) - - - """ - - def pre_get_contact( - self, - request: contact_service.GetContactRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[contact_service.GetContactRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_contact - - Override in a subclass to manipulate the request or metadata - before they are sent to the ContactService server. - """ - return request, metadata - - def post_get_contact( - self, response: contact_service.Contact - ) -> contact_service.Contact: - """Post-rpc interceptor for get_contact - - Override in a subclass to manipulate the response - after it is returned by the ContactService server but before - it is returned to user code. 
- """ - return response - - def pre_list_contacts( - self, - request: contact_service.ListContactsRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[contact_service.ListContactsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_contacts - - Override in a subclass to manipulate the request or metadata - before they are sent to the ContactService server. - """ - return request, metadata - - def post_list_contacts( - self, response: contact_service.ListContactsResponse - ) -> contact_service.ListContactsResponse: - """Post-rpc interceptor for list_contacts - - Override in a subclass to manipulate the response - after it is returned by the ContactService server but before - it is returned to user code. - """ - return response - - def pre_get_operation( - self, - request: operations_pb2.GetOperationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the ContactService server. - """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the ContactService server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class ContactServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: ContactServiceRestInterceptor - - -class ContactServiceRestTransport(ContactServiceTransport): - """REST backend transport for ContactService. - - Provides methods for handling Contact objects. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
- - It sends JSON representations of protocol buffers over HTTP/1.1 - - """ - - def __init__( - self, - *, - host: str = "admanager.googleapis.com", - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = "https", - interceptor: Optional[ContactServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'admanager.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. 
- always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. - # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError( - f"Unexpected hostname structure: {host}" - ) # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST - ) - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or ContactServiceRestInterceptor() - self._prep_wrapped_messages(client_info) - - class _GetContact(ContactServiceRestStub): - def __hash__(self): - return hash("GetContact") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - def __call__( - self, - request: contact_service.GetContactRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> contact_service.Contact: - r"""Call the get contact method over HTTP. - - Args: - request (~.contact_service.GetContactRequest): - The request object. Request object for GetContact method. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.contact_service.Contact: - The Contact resource. - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=networks/*/contacts/*}", - }, - ] - request, metadata = self._interceptor.pre_get_contact(request, metadata) - pb_request = contact_service.GetContactRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = contact_service.Contact() - pb_resp = contact_service.Contact.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_contact(resp) - return resp - - class _ListContacts(ContactServiceRestStub): - def __hash__(self): - return hash("ListContacts") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - def __call__( - self, - request: contact_service.ListContactsRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> contact_service.ListContactsResponse: - r"""Call the list contacts method over HTTP. - - Args: - request (~.contact_service.ListContactsRequest): - The request object. Request object for ListContacts - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.contact_service.ListContactsResponse: - Response object for - ListContactsRequest containing matching - Contact resources. 
- - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{parent=networks/*}/contacts", - }, - ] - request, metadata = self._interceptor.pre_list_contacts(request, metadata) - pb_request = contact_service.ListContactsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = contact_service.ListContactsResponse() - pb_resp = contact_service.ListContactsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_contacts(resp) - return resp - - @property - def get_contact( - self, - ) -> Callable[[contact_service.GetContactRequest], contact_service.Contact]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._GetContact(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_contacts( - self, - ) -> Callable[ - [contact_service.ListContactsRequest], contact_service.ListContactsResponse - ]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListContacts(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(ContactServiceRestStub): - def __call__( - self, - request: operations_pb2.GetOperationRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - operations_pb2.Operation: Response from GetOperation method. 
- """ - - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/exports/*}", - }, - { - "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/runs/*}", - }, - ] - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = operations_pb2.Operation() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_get_operation(resp) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__ = ("ContactServiceRestTransport",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/transports/rest.py deleted file mode 100644 index 33aa7085f6e5..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/transports/rest.py +++ /dev/null @@ -1,527 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -import dataclasses -import json # type: ignore -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - -from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.protobuf import json_format -import grpc # type: ignore -from requests import __version__ as requests_version - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - - -from google.longrunning import operations_pb2 # type: ignore - -from google.ads.admanager_v1.types import creative_service - -from .base import CreativeServiceTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=requests_version, -) - - -class CreativeServiceRestInterceptor: - """Interceptor for CreativeService. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the CreativeServiceRestTransport. - - .. 
code-block:: python - class MyCustomCreativeServiceInterceptor(CreativeServiceRestInterceptor): - def pre_get_creative(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_creative(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_creatives(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_creatives(self, response): - logging.log(f"Received response: {response}") - return response - - transport = CreativeServiceRestTransport(interceptor=MyCustomCreativeServiceInterceptor()) - client = CreativeServiceClient(transport=transport) - - - """ - - def pre_get_creative( - self, - request: creative_service.GetCreativeRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[creative_service.GetCreativeRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_creative - - Override in a subclass to manipulate the request or metadata - before they are sent to the CreativeService server. - """ - return request, metadata - - def post_get_creative( - self, response: creative_service.Creative - ) -> creative_service.Creative: - """Post-rpc interceptor for get_creative - - Override in a subclass to manipulate the response - after it is returned by the CreativeService server but before - it is returned to user code. - """ - return response - - def pre_list_creatives( - self, - request: creative_service.ListCreativesRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[creative_service.ListCreativesRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_creatives - - Override in a subclass to manipulate the request or metadata - before they are sent to the CreativeService server. 
- """ - return request, metadata - - def post_list_creatives( - self, response: creative_service.ListCreativesResponse - ) -> creative_service.ListCreativesResponse: - """Post-rpc interceptor for list_creatives - - Override in a subclass to manipulate the response - after it is returned by the CreativeService server but before - it is returned to user code. - """ - return response - - def pre_get_operation( - self, - request: operations_pb2.GetOperationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the CreativeService server. - """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the CreativeService server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class CreativeServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: CreativeServiceRestInterceptor - - -class CreativeServiceRestTransport(CreativeServiceTransport): - """REST backend transport for CreativeService. - - Provides methods for handling Creative objects. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
- - It sends JSON representations of protocol buffers over HTTP/1.1 - - """ - - def __init__( - self, - *, - host: str = "admanager.googleapis.com", - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = "https", - interceptor: Optional[CreativeServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'admanager.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. 
- always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. - # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError( - f"Unexpected hostname structure: {host}" - ) # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST - ) - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or CreativeServiceRestInterceptor() - self._prep_wrapped_messages(client_info) - - class _GetCreative(CreativeServiceRestStub): - def __hash__(self): - return hash("GetCreative") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - def __call__( - self, - request: creative_service.GetCreativeRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> creative_service.Creative: - r"""Call the get creative method over HTTP. - - Args: - request (~.creative_service.GetCreativeRequest): - The request object. Request object for GetCreative - method. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.creative_service.Creative: - The Creative resource. - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=networks/*/creatives/*}", - }, - ] - request, metadata = self._interceptor.pre_get_creative(request, metadata) - pb_request = creative_service.GetCreativeRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = creative_service.Creative() - pb_resp = creative_service.Creative.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_creative(resp) - return resp - - class _ListCreatives(CreativeServiceRestStub): - def __hash__(self): - return hash("ListCreatives") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - def __call__( - self, - request: creative_service.ListCreativesRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> creative_service.ListCreativesResponse: - r"""Call the list creatives method over HTTP. - - Args: - request (~.creative_service.ListCreativesRequest): - The request object. Request object for ListCreatives - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.creative_service.ListCreativesResponse: - Response object for - ListCreativesRequest containing matching - Creative resources. 
- - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{parent=networks/*}/creatives", - }, - ] - request, metadata = self._interceptor.pre_list_creatives(request, metadata) - pb_request = creative_service.ListCreativesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = creative_service.ListCreativesResponse() - pb_resp = creative_service.ListCreativesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_creatives(resp) - return resp - - @property - def get_creative( - self, - ) -> Callable[[creative_service.GetCreativeRequest], creative_service.Creative]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._GetCreative(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_creatives( - self, - ) -> Callable[ - [creative_service.ListCreativesRequest], creative_service.ListCreativesResponse - ]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListCreatives(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(CreativeServiceRestStub): - def __call__( - self, - request: operations_pb2.GetOperationRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - operations_pb2.Operation: Response from GetOperation method. 
- """ - - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/exports/*}", - }, - { - "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/runs/*}", - }, - ] - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = operations_pb2.Operation() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_get_operation(resp) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__ = ("CreativeServiceRestTransport",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/client.py index 4b1cd58b89f4..986a135d17c5 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/client.py @@ -51,7 +51,11 @@ from google.longrunning import operations_pb2 # type: ignore from google.ads.admanager_v1.services.custom_field_service import pagers -from google.ads.admanager_v1.types import custom_field_enums, custom_field_service +from google.ads.admanager_v1.types import ( + custom_field_enums, + custom_field_messages, + custom_field_service, +) from .transports.base import DEFAULT_CLIENT_INFO, CustomFieldServiceTransport from .transports.rest import CustomFieldServiceRestTransport @@ -702,7 +706,7 @@ def get_custom_field( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> custom_field_service.CustomField: + ) -> custom_field_messages.CustomField: r"""API to retrieve a ``CustomField`` object. .. code-block:: python @@ -749,7 +753,9 @@ def sample_get_custom_field(): Returns: google.ads.admanager_v1.types.CustomField: - The CustomField resource. + An additional, user-created field on + an entity. + """ # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/pagers.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/pagers.py index f0a4e63f1c52..b11c6be336cc 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/pagers.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/pagers.py @@ -38,7 +38,7 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.ads.admanager_v1.types import custom_field_service +from google.ads.admanager_v1.types import custom_field_messages, custom_field_service class ListCustomFieldsPager: @@ -107,7 +107,7 @@ def pages(self) -> Iterator[custom_field_service.ListCustomFieldsResponse]: ) yield self._response - def __iter__(self) -> Iterator[custom_field_service.CustomField]: + def __iter__(self) -> Iterator[custom_field_messages.CustomField]: for page in self.pages: yield from page.custom_fields diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/transports/base.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/transports/base.py index 3578065cdf3f..97f76ac909c3 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/transports/base.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/transports/base.py @@ -26,7 +26,7 @@ from google.oauth2 import service_account # type: ignore from google.ads.admanager_v1 import gapic_version as package_version -from google.ads.admanager_v1.types import custom_field_service +from google.ads.admanager_v1.types import custom_field_messages, custom_field_service DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( 
gapic_version=package_version.__version__ @@ -156,8 +156,8 @@ def get_custom_field( ) -> Callable[ [custom_field_service.GetCustomFieldRequest], Union[ - custom_field_service.CustomField, - Awaitable[custom_field_service.CustomField], + custom_field_messages.CustomField, + Awaitable[custom_field_messages.CustomField], ], ]: raise NotImplementedError() diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/transports/rest.py index 0da154858e92..4994a3e75121 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/transports/rest.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/transports/rest.py @@ -38,7 +38,7 @@ from google.longrunning import operations_pb2 # type: ignore -from google.ads.admanager_v1.types import custom_field_service +from google.ads.admanager_v1.types import custom_field_messages, custom_field_service from .base import CustomFieldServiceTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO @@ -100,8 +100,8 @@ def pre_get_custom_field( return request, metadata def post_get_custom_field( - self, response: custom_field_service.CustomField - ) -> custom_field_service.CustomField: + self, response: custom_field_messages.CustomField + ) -> custom_field_messages.CustomField: """Post-rpc interceptor for get_custom_field Override in a subclass to manipulate the response @@ -274,7 +274,7 @@ def __call__( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> custom_field_service.CustomField: + ) -> custom_field_messages.CustomField: r"""Call the get custom field method over HTTP. Args: @@ -287,8 +287,10 @@ def __call__( sent along with the request as metadata. 
Returns: - ~.custom_field_service.CustomField: - The ``CustomField`` resource. + ~.custom_field_messages.CustomField: + An additional, user-created field on + an entity. + """ http_options: List[Dict[str, str]] = [ @@ -333,8 +335,8 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = custom_field_service.CustomField() - pb_resp = custom_field_service.CustomField.pb(resp) + resp = custom_field_messages.CustomField() + pb_resp = custom_field_messages.CustomField.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_custom_field(resp) @@ -433,7 +435,7 @@ def __call__( def get_custom_field( self, ) -> Callable[ - [custom_field_service.GetCustomFieldRequest], custom_field_service.CustomField + [custom_field_service.GetCustomFieldRequest], custom_field_messages.CustomField ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast @@ -481,11 +483,11 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/exports/*}", + "uri": "/v1/{name=networks/*/operations/reports/runs/*}", }, { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/runs/*}", + "uri": "/v1/{name=networks/*/operations/reports/exports/*}", }, ] diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/client.py index 53ee2f5439d2..63992825ffe4 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/client.py @@ -53,6 +53,7 @@ from google.ads.admanager_v1.services.custom_targeting_key_service import pagers from 
google.ads.admanager_v1.types import ( custom_targeting_key_enums, + custom_targeting_key_messages, custom_targeting_key_service, ) @@ -712,7 +713,7 @@ def get_custom_targeting_key( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> custom_targeting_key_service.CustomTargetingKey: + ) -> custom_targeting_key_messages.CustomTargetingKey: r"""API to retrieve a ``CustomTargetingKey`` object. .. code-block:: python diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/pagers.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/pagers.py index e56ff58da48c..88953ea7950c 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/pagers.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/pagers.py @@ -38,7 +38,10 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.ads.admanager_v1.types import custom_targeting_key_service +from google.ads.admanager_v1.types import ( + custom_targeting_key_messages, + custom_targeting_key_service, +) class ListCustomTargetingKeysPager: @@ -113,7 +116,7 @@ def pages( ) yield self._response - def __iter__(self) -> Iterator[custom_targeting_key_service.CustomTargetingKey]: + def __iter__(self) -> Iterator[custom_targeting_key_messages.CustomTargetingKey]: for page in self.pages: yield from page.custom_targeting_keys diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/transports/base.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/transports/base.py index a55f7a97d634..7e4925dd049f 100644 --- 
a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/transports/base.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/transports/base.py @@ -26,7 +26,10 @@ from google.oauth2 import service_account # type: ignore from google.ads.admanager_v1 import gapic_version as package_version -from google.ads.admanager_v1.types import custom_targeting_key_service +from google.ads.admanager_v1.types import ( + custom_targeting_key_messages, + custom_targeting_key_service, +) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -156,8 +159,8 @@ def get_custom_targeting_key( ) -> Callable[ [custom_targeting_key_service.GetCustomTargetingKeyRequest], Union[ - custom_targeting_key_service.CustomTargetingKey, - Awaitable[custom_targeting_key_service.CustomTargetingKey], + custom_targeting_key_messages.CustomTargetingKey, + Awaitable[custom_targeting_key_messages.CustomTargetingKey], ], ]: raise NotImplementedError() diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/transports/rest.py index 5ea81bb49e6c..6b9540dc0b60 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/transports/rest.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/transports/rest.py @@ -38,7 +38,10 @@ from google.longrunning import operations_pb2 # type: ignore -from google.ads.admanager_v1.types import custom_targeting_key_service +from google.ads.admanager_v1.types import ( + custom_targeting_key_messages, + custom_targeting_key_service, +) from .base import CustomTargetingKeyServiceTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO @@ -103,8 +106,8 @@ def 
pre_get_custom_targeting_key( return request, metadata def post_get_custom_targeting_key( - self, response: custom_targeting_key_service.CustomTargetingKey - ) -> custom_targeting_key_service.CustomTargetingKey: + self, response: custom_targeting_key_messages.CustomTargetingKey + ) -> custom_targeting_key_messages.CustomTargetingKey: """Post-rpc interceptor for get_custom_targeting_key Override in a subclass to manipulate the response @@ -280,7 +283,7 @@ def __call__( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> custom_targeting_key_service.CustomTargetingKey: + ) -> custom_targeting_key_messages.CustomTargetingKey: r"""Call the get custom targeting key method over HTTP. Args: @@ -293,7 +296,7 @@ def __call__( sent along with the request as metadata. Returns: - ~.custom_targeting_key_service.CustomTargetingKey: + ~.custom_targeting_key_messages.CustomTargetingKey: The ``CustomTargetingKey`` resource. """ @@ -341,8 +344,8 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = custom_targeting_key_service.CustomTargetingKey() - pb_resp = custom_targeting_key_service.CustomTargetingKey.pb(resp) + resp = custom_targeting_key_messages.CustomTargetingKey() + pb_resp = custom_targeting_key_messages.CustomTargetingKey.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_custom_targeting_key(resp) @@ -447,7 +450,7 @@ def get_custom_targeting_key( self, ) -> Callable[ [custom_targeting_key_service.GetCustomTargetingKeyRequest], - custom_targeting_key_service.CustomTargetingKey, + custom_targeting_key_messages.CustomTargetingKey, ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast @@ -495,11 +498,11 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/exports/*}", + "uri": "/v1/{name=networks/*/operations/reports/runs/*}", }, { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/runs/*}", + "uri": "/v1/{name=networks/*/operations/reports/exports/*}", }, ] diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/client.py index 2a1a0435b1c9..6c03f1fb4c53 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/client.py @@ -53,6 +53,7 @@ from google.ads.admanager_v1.services.custom_targeting_value_service import pagers from google.ads.admanager_v1.types import ( custom_targeting_value_enums, + custom_targeting_value_messages, custom_targeting_value_service, ) @@ -723,7 +724,7 @@ def get_custom_targeting_value( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> custom_targeting_value_service.CustomTargetingValue: + ) -> custom_targeting_value_messages.CustomTargetingValue: r"""API to retrieve a ``CustomTargetingValue`` object. .. 
code-block:: python diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/pagers.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/pagers.py index 09ef836cdb72..214d53becdec 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/pagers.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/pagers.py @@ -38,7 +38,10 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.ads.admanager_v1.types import custom_targeting_value_service +from google.ads.admanager_v1.types import ( + custom_targeting_value_messages, + custom_targeting_value_service, +) class ListCustomTargetingValuesPager: @@ -113,7 +116,9 @@ def pages( ) yield self._response - def __iter__(self) -> Iterator[custom_targeting_value_service.CustomTargetingValue]: + def __iter__( + self, + ) -> Iterator[custom_targeting_value_messages.CustomTargetingValue]: for page in self.pages: yield from page.custom_targeting_values diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/transports/base.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/transports/base.py index a9d002f465f7..0ed99f654001 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/transports/base.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/transports/base.py @@ -26,7 +26,10 @@ from google.oauth2 import service_account # type: ignore from google.ads.admanager_v1 import gapic_version as package_version -from google.ads.admanager_v1.types import custom_targeting_value_service +from google.ads.admanager_v1.types import ( + 
custom_targeting_value_messages, + custom_targeting_value_service, +) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -156,8 +159,8 @@ def get_custom_targeting_value( ) -> Callable[ [custom_targeting_value_service.GetCustomTargetingValueRequest], Union[ - custom_targeting_value_service.CustomTargetingValue, - Awaitable[custom_targeting_value_service.CustomTargetingValue], + custom_targeting_value_messages.CustomTargetingValue, + Awaitable[custom_targeting_value_messages.CustomTargetingValue], ], ]: raise NotImplementedError() diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/transports/rest.py index 1154a1bebe68..4706f5043211 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/transports/rest.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/transports/rest.py @@ -38,7 +38,10 @@ from google.longrunning import operations_pb2 # type: ignore -from google.ads.admanager_v1.types import custom_targeting_value_service +from google.ads.admanager_v1.types import ( + custom_targeting_value_messages, + custom_targeting_value_service, +) from .base import CustomTargetingValueServiceTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO @@ -103,8 +106,8 @@ def pre_get_custom_targeting_value( return request, metadata def post_get_custom_targeting_value( - self, response: custom_targeting_value_service.CustomTargetingValue - ) -> custom_targeting_value_service.CustomTargetingValue: + self, response: custom_targeting_value_messages.CustomTargetingValue + ) -> custom_targeting_value_messages.CustomTargetingValue: """Post-rpc interceptor for get_custom_targeting_value Override in a subclass to manipulate the response @@ -280,7 +283,7 @@ 
def __call__( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> custom_targeting_value_service.CustomTargetingValue: + ) -> custom_targeting_value_messages.CustomTargetingValue: r"""Call the get custom targeting value method over HTTP. @@ -294,7 +297,7 @@ def __call__( sent along with the request as metadata. Returns: - ~.custom_targeting_value_service.CustomTargetingValue: + ~.custom_targeting_value_messages.CustomTargetingValue: The ``CustomTargetingValue`` resource. """ @@ -344,8 +347,8 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = custom_targeting_value_service.CustomTargetingValue() - pb_resp = custom_targeting_value_service.CustomTargetingValue.pb(resp) + resp = custom_targeting_value_messages.CustomTargetingValue() + pb_resp = custom_targeting_value_messages.CustomTargetingValue.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_custom_targeting_value(resp) @@ -454,7 +457,7 @@ def get_custom_targeting_value( self, ) -> Callable[ [custom_targeting_value_service.GetCustomTargetingValueRequest], - custom_targeting_value_service.CustomTargetingValue, + custom_targeting_value_messages.CustomTargetingValue, ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast @@ -502,11 +505,11 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/exports/*}", + "uri": "/v1/{name=networks/*/operations/reports/runs/*}", }, { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/runs/*}", + "uri": "/v1/{name=networks/*/operations/reports/exports/*}", }, ] diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/__init__.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/__init__.py new file mode 100644 index 000000000000..3b03f6d3f9dc --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/__init__.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import EntitySignalsMappingServiceClient + +__all__ = ("EntitySignalsMappingServiceClient",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/client.py new file mode 100644 index 000000000000..fe94c89d85d2 --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/client.py @@ -0,0 +1,1559 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ads.admanager_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore + +from google.ads.admanager_v1.services.entity_signals_mapping_service import pagers +from google.ads.admanager_v1.types import ( + entity_signals_mapping_messages, + entity_signals_mapping_service, +) + +from .transports.base import DEFAULT_CLIENT_INFO, EntitySignalsMappingServiceTransport +from .transports.rest import EntitySignalsMappingServiceRestTransport + + +class EntitySignalsMappingServiceClientMeta(type): + """Metaclass for the EntitySignalsMappingService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[EntitySignalsMappingServiceTransport]] + _transport_registry["rest"] = EntitySignalsMappingServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[EntitySignalsMappingServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class EntitySignalsMappingServiceClient( + metaclass=EntitySignalsMappingServiceClientMeta +): + """Provides methods for handling ``EntitySignalsMapping`` objects.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = "admanager.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "admanager.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + EntitySignalsMappingServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + EntitySignalsMappingServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> EntitySignalsMappingServiceTransport: + """Returns the transport used by the client instance. + + Returns: + EntitySignalsMappingServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def entity_signals_mapping_path( + network_code: str, + entity_signals_mapping: str, + ) -> str: + """Returns a fully-qualified entity_signals_mapping string.""" + return "networks/{network_code}/entitySignalsMappings/{entity_signals_mapping}".format( + network_code=network_code, + entity_signals_mapping=entity_signals_mapping, + ) + + @staticmethod + def parse_entity_signals_mapping_path(path: str) -> Dict[str, str]: + """Parses a entity_signals_mapping path into its component segments.""" + m = re.match( + r"^networks/(?P.+?)/entitySignalsMappings/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def network_path( + network_code: str, + ) -> str: + """Returns a fully-qualified network string.""" + return "networks/{network_code}".format( + network_code=network_code, + ) + + @staticmethod + def parse_network_path(path: str) -> Dict[str, str]: + """Parses a network path into its component segments.""" + m = re.match(r"^networks/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + 
organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse a organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+ + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. 
+ """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = EntitySignalsMappingServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = EntitySignalsMappingServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ( + EntitySignalsMappingServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. 
+ + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = EntitySignalsMappingServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = EntitySignalsMappingServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. 
+ """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or EntitySignalsMappingServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + EntitySignalsMappingServiceTransport, + Callable[..., EntitySignalsMappingServiceTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the entity signals mapping service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,EntitySignalsMappingServiceTransport,Callable[..., EntitySignalsMappingServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the EntitySignalsMappingServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. 
The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = EntitySignalsMappingServiceClient._read_environment_variables() + self._client_cert_source = ( + EntitySignalsMappingServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + ) + self._universe_domain = EntitySignalsMappingServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, EntitySignalsMappingServiceTransport) + if transport_provided: + # transport is a EntitySignalsMappingServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(EntitySignalsMappingServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or EntitySignalsMappingServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[EntitySignalsMappingServiceTransport], + Callable[..., EntitySignalsMappingServiceTransport], + ] = ( + EntitySignalsMappingServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast( + Callable[..., EntitySignalsMappingServiceTransport], transport + ) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def get_entity_signals_mapping( + self, + request: Optional[ + Union[entity_signals_mapping_service.GetEntitySignalsMappingRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> entity_signals_mapping_messages.EntitySignalsMapping: + r"""API to retrieve a ``EntitySignalsMapping`` object. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import admanager_v1 + + def sample_get_entity_signals_mapping(): + # Create a client + client = admanager_v1.EntitySignalsMappingServiceClient() + + # Initialize request argument(s) + request = admanager_v1.GetEntitySignalsMappingRequest( + name="name_value", + ) + + # Make the request + response = client.get_entity_signals_mapping(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ads.admanager_v1.types.GetEntitySignalsMappingRequest, dict]): + The request object. Request object for ``GetEntitySignalsMapping`` method. + name (str): + Required. The resource name of the EntitySignalsMapping. + Format: + ``networks/{network_code}/entitySignalsMappings/{entity_signals_mapping_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.admanager_v1.types.EntitySignalsMapping: + The EntitySignalsMapping resource. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, entity_signals_mapping_service.GetEntitySignalsMappingRequest + ): + request = entity_signals_mapping_service.GetEntitySignalsMappingRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.get_entity_signals_mapping + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_entity_signals_mappings( + self, + request: Optional[ + Union[entity_signals_mapping_service.ListEntitySignalsMappingsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListEntitySignalsMappingsPager: + r"""API to retrieve a list of ``EntitySignalsMapping`` objects. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import admanager_v1 + + def sample_list_entity_signals_mappings(): + # Create a client + client = admanager_v1.EntitySignalsMappingServiceClient() + + # Initialize request argument(s) + request = admanager_v1.ListEntitySignalsMappingsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_entity_signals_mappings(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.ads.admanager_v1.types.ListEntitySignalsMappingsRequest, dict]): + The request object. Request object for ``ListEntitySignalsMappings`` method. + parent (str): + Required. The parent, which owns this collection of + EntitySignalsMappings. Format: + ``networks/{network_code}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.admanager_v1.services.entity_signals_mapping_service.pagers.ListEntitySignalsMappingsPager: + Response object for ListEntitySignalsMappingsRequest containing matching + EntitySignalsMapping resources. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, entity_signals_mapping_service.ListEntitySignalsMappingsRequest + ): + request = entity_signals_mapping_service.ListEntitySignalsMappingsRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_entity_signals_mappings + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListEntitySignalsMappingsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def create_entity_signals_mapping( + self, + request: Optional[ + Union[ + entity_signals_mapping_service.CreateEntitySignalsMappingRequest, dict + ] + ] = None, + *, + parent: Optional[str] = None, + entity_signals_mapping: Optional[ + entity_signals_mapping_messages.EntitySignalsMapping + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> entity_signals_mapping_messages.EntitySignalsMapping: + r"""API to create an ``EntitySignalsMapping`` object. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import admanager_v1 + + def sample_create_entity_signals_mapping(): + # Create a client + client = admanager_v1.EntitySignalsMappingServiceClient() + + # Initialize request argument(s) + entity_signals_mapping = admanager_v1.EntitySignalsMapping() + entity_signals_mapping.audience_segment_id = 1980 + entity_signals_mapping.taxonomy_category_ids = [2268, 2269] + + request = admanager_v1.CreateEntitySignalsMappingRequest( + parent="parent_value", + entity_signals_mapping=entity_signals_mapping, + ) + + # Make the request + response = client.create_entity_signals_mapping(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ads.admanager_v1.types.CreateEntitySignalsMappingRequest, dict]): + The request object. Request object for + 'CreateEntitySignalsMapping' method. + parent (str): + Required. The parent resource where this + EntitySignalsMapping will be created. 
Format: + ``networks/{network_code}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entity_signals_mapping (google.ads.admanager_v1.types.EntitySignalsMapping): + Required. The EntitySignalsMapping + object to create. + + This corresponds to the ``entity_signals_mapping`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.admanager_v1.types.EntitySignalsMapping: + The EntitySignalsMapping resource. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, entity_signals_mapping]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, entity_signals_mapping_service.CreateEntitySignalsMappingRequest + ): + request = entity_signals_mapping_service.CreateEntitySignalsMappingRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if entity_signals_mapping is not None: + request.entity_signals_mapping = entity_signals_mapping + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[ + self._transport.create_entity_signals_mapping + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_entity_signals_mapping( + self, + request: Optional[ + Union[ + entity_signals_mapping_service.UpdateEntitySignalsMappingRequest, dict + ] + ] = None, + *, + entity_signals_mapping: Optional[ + entity_signals_mapping_messages.EntitySignalsMapping + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> entity_signals_mapping_messages.EntitySignalsMapping: + r"""API to update an ``EntitySignalsMapping`` object. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import admanager_v1 + + def sample_update_entity_signals_mapping(): + # Create a client + client = admanager_v1.EntitySignalsMappingServiceClient() + + # Initialize request argument(s) + entity_signals_mapping = admanager_v1.EntitySignalsMapping() + entity_signals_mapping.audience_segment_id = 1980 + entity_signals_mapping.taxonomy_category_ids = [2268, 2269] + + request = admanager_v1.UpdateEntitySignalsMappingRequest( + entity_signals_mapping=entity_signals_mapping, + ) + + # Make the request + response = client.update_entity_signals_mapping(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ads.admanager_v1.types.UpdateEntitySignalsMappingRequest, dict]): + The request object. Request object for + 'UpdateEntitySignalsMapping' method. + entity_signals_mapping (google.ads.admanager_v1.types.EntitySignalsMapping): + Required. The ``EntitySignalsMapping`` to update. + + The EntitySignalsMapping's name is used to identify the + EntitySignalsMapping to update. Format: + ``networks/{network_code}/entitySignalsMappings/{entity_signals_mapping}`` + + This corresponds to the ``entity_signals_mapping`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.ads.admanager_v1.types.EntitySignalsMapping: + The EntitySignalsMapping resource. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([entity_signals_mapping, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, entity_signals_mapping_service.UpdateEntitySignalsMappingRequest + ): + request = entity_signals_mapping_service.UpdateEntitySignalsMappingRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if entity_signals_mapping is not None: + request.entity_signals_mapping = entity_signals_mapping + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.update_entity_signals_mapping + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("entity_signals_mapping.name", request.entity_signals_mapping.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def batch_create_entity_signals_mappings( + self, + request: Optional[ + Union[ + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest, + dict, + ] + ] = None, + *, + parent: Optional[str] = None, + requests: Optional[ + MutableSequence[ + entity_signals_mapping_service.CreateEntitySignalsMappingRequest + ] + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse: + r"""API to batch create ``EntitySignalsMapping`` objects. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import admanager_v1 + + def sample_batch_create_entity_signals_mappings(): + # Create a client + client = admanager_v1.EntitySignalsMappingServiceClient() + + # Initialize request argument(s) + requests = admanager_v1.CreateEntitySignalsMappingRequest() + requests.parent = "parent_value" + requests.entity_signals_mapping.audience_segment_id = 1980 + requests.entity_signals_mapping.taxonomy_category_ids = [2268, 2269] + + request = admanager_v1.BatchCreateEntitySignalsMappingsRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + response = client.batch_create_entity_signals_mappings(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ads.admanager_v1.types.BatchCreateEntitySignalsMappingsRequest, dict]): + The request object. Request object for ``BatchCreateEntitySignalsMappings`` + method. + parent (str): + Required. 
The parent resource where + ``EntitySignalsMappings`` will be created. Format: + ``networks/{network_code}`` The parent field in the + CreateEntitySignalsMappingRequest must match this field. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + requests (MutableSequence[google.ads.admanager_v1.types.CreateEntitySignalsMappingRequest]): + Required. The ``EntitySignalsMapping`` objects to + create. A maximum of 100 objects can be created in a + batch. + + This corresponds to the ``requests`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.admanager_v1.types.BatchCreateEntitySignalsMappingsResponse: + Response object for BatchCreateEntitySignalsMappings + method. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, requests]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest, + ): + request = ( + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest( + request + ) + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if requests is not None: + request.requests = requests + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.batch_create_entity_signals_mappings + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def batch_update_entity_signals_mappings( + self, + request: Optional[ + Union[ + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest, + dict, + ] + ] = None, + *, + parent: Optional[str] = None, + requests: Optional[ + MutableSequence[ + entity_signals_mapping_service.UpdateEntitySignalsMappingRequest + ] + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse: + r"""API to batch update ``EntitySignalsMapping`` objects. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import admanager_v1 + + def sample_batch_update_entity_signals_mappings(): + # Create a client + client = admanager_v1.EntitySignalsMappingServiceClient() + + # Initialize request argument(s) + requests = admanager_v1.UpdateEntitySignalsMappingRequest() + requests.entity_signals_mapping.audience_segment_id = 1980 + requests.entity_signals_mapping.taxonomy_category_ids = [2268, 2269] + + request = admanager_v1.BatchUpdateEntitySignalsMappingsRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + response = client.batch_update_entity_signals_mappings(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ads.admanager_v1.types.BatchUpdateEntitySignalsMappingsRequest, dict]): + The request object. Request object for ``BatchUpdateEntitySignalsMappings`` + method. + parent (str): + Required. The parent resource where + ``EntitySignalsMappings`` will be updated. Format: + ``networks/{network_code}`` The parent field in the + UpdateEntitySignalsMappingRequest must match this field. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + requests (MutableSequence[google.ads.admanager_v1.types.UpdateEntitySignalsMappingRequest]): + Required. The ``EntitySignalsMapping`` objects to + update. A maximum of 100 objects can be updated in a + batch. + + This corresponds to the ``requests`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.ads.admanager_v1.types.BatchUpdateEntitySignalsMappingsResponse: + Response object for BatchUpdateEntitySignalsMappings + method. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, requests]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest, + ): + request = ( + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest( + request + ) + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if requests is not None: + request.requests = requests + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.batch_update_entity_signals_mappings + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "EntitySignalsMappingServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. 
warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("EntitySignalsMappingServiceClient",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/pagers.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/pagers.py similarity index 69% rename from packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/pagers.py rename to packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/pagers.py index fda9c7f48045..464c0fe8d515 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/pagers.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/pagers.py @@ -38,32 +38,37 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.ads.admanager_v1.types import creative_service +from google.ads.admanager_v1.types import ( + entity_signals_mapping_messages, + entity_signals_mapping_service, +) -class ListCreativesPager: - """A pager for iterating through ``list_creatives`` requests. +class ListEntitySignalsMappingsPager: + """A pager for iterating through ``list_entity_signals_mappings`` requests. This class thinly wraps an initial - :class:`google.ads.admanager_v1.types.ListCreativesResponse` object, and + :class:`google.ads.admanager_v1.types.ListEntitySignalsMappingsResponse` object, and provides an ``__iter__`` method to iterate through its - ``creatives`` field. + ``entity_signals_mappings`` field. 
If there are more pages, the ``__iter__`` method will make additional - ``ListCreatives`` requests and continue to iterate - through the ``creatives`` field on the + ``ListEntitySignalsMappings`` requests and continue to iterate + through the ``entity_signals_mappings`` field on the corresponding responses. - All the usual :class:`google.ads.admanager_v1.types.ListCreativesResponse` + All the usual :class:`google.ads.admanager_v1.types.ListEntitySignalsMappingsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ def __init__( self, - method: Callable[..., creative_service.ListCreativesResponse], - request: creative_service.ListCreativesRequest, - response: creative_service.ListCreativesResponse, + method: Callable[ + ..., entity_signals_mapping_service.ListEntitySignalsMappingsResponse + ], + request: entity_signals_mapping_service.ListEntitySignalsMappingsRequest, + response: entity_signals_mapping_service.ListEntitySignalsMappingsResponse, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, @@ -74,9 +79,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (google.ads.admanager_v1.types.ListCreativesRequest): + request (google.ads.admanager_v1.types.ListEntitySignalsMappingsRequest): The initial request object. - response (google.ads.admanager_v1.types.ListCreativesResponse): + response (google.ads.admanager_v1.types.ListEntitySignalsMappingsResponse): The initial response object. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -85,7 +90,9 @@ def __init__( sent along with the request as metadata. 
""" self._method = method - self._request = creative_service.ListCreativesRequest(request) + self._request = entity_signals_mapping_service.ListEntitySignalsMappingsRequest( + request + ) self._response = response self._retry = retry self._timeout = timeout @@ -95,7 +102,9 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterator[creative_service.ListCreativesResponse]: + def pages( + self, + ) -> Iterator[entity_signals_mapping_service.ListEntitySignalsMappingsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token @@ -107,9 +116,11 @@ def pages(self) -> Iterator[creative_service.ListCreativesResponse]: ) yield self._response - def __iter__(self) -> Iterator[creative_service.Creative]: + def __iter__( + self, + ) -> Iterator[entity_signals_mapping_messages.EntitySignalsMapping]: for page in self.pages: - yield from page.creatives + yield from page.entity_signals_mappings def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/team_service/transports/__init__.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/transports/__init__.py similarity index 60% rename from packages/google-ads-admanager/google/ads/admanager_v1/services/team_service/transports/__init__.py rename to packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/transports/__init__.py index 0cdd254a8628..a842b7667625 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/team_service/transports/__init__.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/transports/__init__.py @@ -16,15 +16,20 @@ from collections import OrderedDict from typing import Dict, Type -from .base import TeamServiceTransport 
-from .rest import TeamServiceRestInterceptor, TeamServiceRestTransport +from .base import EntitySignalsMappingServiceTransport +from .rest import ( + EntitySignalsMappingServiceRestInterceptor, + EntitySignalsMappingServiceRestTransport, +) # Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[TeamServiceTransport]] -_transport_registry["rest"] = TeamServiceRestTransport +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[EntitySignalsMappingServiceTransport]] +_transport_registry["rest"] = EntitySignalsMappingServiceRestTransport __all__ = ( - "TeamServiceTransport", - "TeamServiceRestTransport", - "TeamServiceRestInterceptor", + "EntitySignalsMappingServiceTransport", + "EntitySignalsMappingServiceRestTransport", + "EntitySignalsMappingServiceRestInterceptor", ) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/transports/base.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/transports/base.py new file mode 100644 index 000000000000..cc29ed2e8641 --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/transports/base.py @@ -0,0 +1,266 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ads.admanager_v1 import gapic_version as package_version +from google.ads.admanager_v1.types import ( + entity_signals_mapping_messages, + entity_signals_mapping_service, +) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class EntitySignalsMappingServiceTransport(abc.ABC): + """Abstract transport class for EntitySignalsMappingService.""" + + AUTH_SCOPES = () + + DEFAULT_HOST: str = "admanager.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'admanager.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. 
+ This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. 
Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.get_entity_signals_mapping: gapic_v1.method.wrap_method( + self.get_entity_signals_mapping, + default_timeout=None, + client_info=client_info, + ), + self.list_entity_signals_mappings: gapic_v1.method.wrap_method( + self.list_entity_signals_mappings, + default_timeout=None, + client_info=client_info, + ), + self.create_entity_signals_mapping: gapic_v1.method.wrap_method( + self.create_entity_signals_mapping, + default_timeout=None, + client_info=client_info, + ), + self.update_entity_signals_mapping: gapic_v1.method.wrap_method( + self.update_entity_signals_mapping, + default_timeout=None, + client_info=client_info, + ), + self.batch_create_entity_signals_mappings: gapic_v1.method.wrap_method( + self.batch_create_entity_signals_mappings, + default_timeout=None, + client_info=client_info, + ), + self.batch_update_entity_signals_mappings: gapic_v1.method.wrap_method( + self.batch_update_entity_signals_mappings, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def get_entity_signals_mapping( + self, + ) -> Callable[ + [entity_signals_mapping_service.GetEntitySignalsMappingRequest], + Union[ + entity_signals_mapping_messages.EntitySignalsMapping, + Awaitable[entity_signals_mapping_messages.EntitySignalsMapping], + ], + ]: + raise NotImplementedError() + + @property + def list_entity_signals_mappings( + self, + ) -> Callable[ + [entity_signals_mapping_service.ListEntitySignalsMappingsRequest], + Union[ + entity_signals_mapping_service.ListEntitySignalsMappingsResponse, + Awaitable[entity_signals_mapping_service.ListEntitySignalsMappingsResponse], + ], + ]: + raise NotImplementedError() + + @property + def create_entity_signals_mapping( + self, + ) -> Callable[ + [entity_signals_mapping_service.CreateEntitySignalsMappingRequest], + Union[ + entity_signals_mapping_messages.EntitySignalsMapping, + Awaitable[entity_signals_mapping_messages.EntitySignalsMapping], + ], + ]: + raise NotImplementedError() + + @property + def update_entity_signals_mapping( + self, + ) -> Callable[ + [entity_signals_mapping_service.UpdateEntitySignalsMappingRequest], + Union[ + entity_signals_mapping_messages.EntitySignalsMapping, + Awaitable[entity_signals_mapping_messages.EntitySignalsMapping], + ], + ]: + raise NotImplementedError() + + @property + def batch_create_entity_signals_mappings( + self, + ) -> Callable[ + [entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest], + Union[ + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse, + Awaitable[ + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse + ], + ], + ]: + raise NotImplementedError() + + @property + def batch_update_entity_signals_mappings( + self, + ) -> Callable[ + [entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest], + Union[ + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse, + Awaitable[ + 
entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse + ], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("EntitySignalsMappingServiceTransport",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/transports/rest.py new file mode 100644 index 000000000000..a6e1199ef6ef --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/transports/rest.py @@ -0,0 +1,1153 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.ads.admanager_v1.types import ( + entity_signals_mapping_messages, + entity_signals_mapping_service, +) + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import EntitySignalsMappingServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class EntitySignalsMappingServiceRestInterceptor: + """Interceptor for EntitySignalsMappingService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the EntitySignalsMappingServiceRestTransport. + + .. 
code-block:: python + class MyCustomEntitySignalsMappingServiceInterceptor(EntitySignalsMappingServiceRestInterceptor): + def pre_batch_create_entity_signals_mappings(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_create_entity_signals_mappings(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_batch_update_entity_signals_mappings(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_update_entity_signals_mappings(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_entity_signals_mapping(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_entity_signals_mapping(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_entity_signals_mapping(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_entity_signals_mapping(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_entity_signals_mappings(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_entity_signals_mappings(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_entity_signals_mapping(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_entity_signals_mapping(self, response): + logging.log(f"Received response: {response}") + return response + + transport = EntitySignalsMappingServiceRestTransport(interceptor=MyCustomEntitySignalsMappingServiceInterceptor()) + client = EntitySignalsMappingServiceClient(transport=transport) + + + """ + + def pre_batch_create_entity_signals_mappings( + self, + 
request: entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for batch_create_entity_signals_mappings + + Override in a subclass to manipulate the request or metadata + before they are sent to the EntitySignalsMappingService server. + """ + return request, metadata + + def post_batch_create_entity_signals_mappings( + self, + response: entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse, + ) -> entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse: + """Post-rpc interceptor for batch_create_entity_signals_mappings + + Override in a subclass to manipulate the response + after it is returned by the EntitySignalsMappingService server but before + it is returned to user code. + """ + return response + + def pre_batch_update_entity_signals_mappings( + self, + request: entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for batch_update_entity_signals_mappings + + Override in a subclass to manipulate the request or metadata + before they are sent to the EntitySignalsMappingService server. + """ + return request, metadata + + def post_batch_update_entity_signals_mappings( + self, + response: entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse, + ) -> entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse: + """Post-rpc interceptor for batch_update_entity_signals_mappings + + Override in a subclass to manipulate the response + after it is returned by the EntitySignalsMappingService server but before + it is returned to user code. 
+ """ + return response + + def pre_create_entity_signals_mapping( + self, + request: entity_signals_mapping_service.CreateEntitySignalsMappingRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + entity_signals_mapping_service.CreateEntitySignalsMappingRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for create_entity_signals_mapping + + Override in a subclass to manipulate the request or metadata + before they are sent to the EntitySignalsMappingService server. + """ + return request, metadata + + def post_create_entity_signals_mapping( + self, response: entity_signals_mapping_messages.EntitySignalsMapping + ) -> entity_signals_mapping_messages.EntitySignalsMapping: + """Post-rpc interceptor for create_entity_signals_mapping + + Override in a subclass to manipulate the response + after it is returned by the EntitySignalsMappingService server but before + it is returned to user code. + """ + return response + + def pre_get_entity_signals_mapping( + self, + request: entity_signals_mapping_service.GetEntitySignalsMappingRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + entity_signals_mapping_service.GetEntitySignalsMappingRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for get_entity_signals_mapping + + Override in a subclass to manipulate the request or metadata + before they are sent to the EntitySignalsMappingService server. + """ + return request, metadata + + def post_get_entity_signals_mapping( + self, response: entity_signals_mapping_messages.EntitySignalsMapping + ) -> entity_signals_mapping_messages.EntitySignalsMapping: + """Post-rpc interceptor for get_entity_signals_mapping + + Override in a subclass to manipulate the response + after it is returned by the EntitySignalsMappingService server but before + it is returned to user code. 
+ """ + return response + + def pre_list_entity_signals_mappings( + self, + request: entity_signals_mapping_service.ListEntitySignalsMappingsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + entity_signals_mapping_service.ListEntitySignalsMappingsRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for list_entity_signals_mappings + + Override in a subclass to manipulate the request or metadata + before they are sent to the EntitySignalsMappingService server. + """ + return request, metadata + + def post_list_entity_signals_mappings( + self, response: entity_signals_mapping_service.ListEntitySignalsMappingsResponse + ) -> entity_signals_mapping_service.ListEntitySignalsMappingsResponse: + """Post-rpc interceptor for list_entity_signals_mappings + + Override in a subclass to manipulate the response + after it is returned by the EntitySignalsMappingService server but before + it is returned to user code. + """ + return response + + def pre_update_entity_signals_mapping( + self, + request: entity_signals_mapping_service.UpdateEntitySignalsMappingRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + entity_signals_mapping_service.UpdateEntitySignalsMappingRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for update_entity_signals_mapping + + Override in a subclass to manipulate the request or metadata + before they are sent to the EntitySignalsMappingService server. + """ + return request, metadata + + def post_update_entity_signals_mapping( + self, response: entity_signals_mapping_messages.EntitySignalsMapping + ) -> entity_signals_mapping_messages.EntitySignalsMapping: + """Post-rpc interceptor for update_entity_signals_mapping + + Override in a subclass to manipulate the response + after it is returned by the EntitySignalsMappingService server but before + it is returned to user code. 
+ """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the EntitySignalsMappingService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the EntitySignalsMappingService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class EntitySignalsMappingServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: EntitySignalsMappingServiceRestInterceptor + + +class EntitySignalsMappingServiceRestTransport(EntitySignalsMappingServiceTransport): + """REST backend transport for EntitySignalsMappingService. + + Provides methods for handling ``EntitySignalsMapping`` objects. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "admanager.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[EntitySignalsMappingServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'admanager.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or EntitySignalsMappingServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _BatchCreateEntitySignalsMappings(EntitySignalsMappingServiceRestStub): + def __hash__(self): + return hash("BatchCreateEntitySignalsMappings") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse: + r"""Call the batch 
create entity + signals mappings method over HTTP. + + Args: + request (~.entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest): + The request object. Request object for ``BatchCreateEntitySignalsMappings`` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse: + Response object for ``BatchCreateEntitySignalsMappings`` + method. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=networks/*}/entitySignalsMappings:batchCreate", + "body": "*", + }, + ] + ( + request, + metadata, + ) = self._interceptor.pre_batch_create_entity_signals_mappings( + request, metadata + ) + pb_request = entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError 
exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = ( + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse() + ) + pb_resp = entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse.pb( + resp + ) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_create_entity_signals_mappings(resp) + return resp + + class _BatchUpdateEntitySignalsMappings(EntitySignalsMappingServiceRestStub): + def __hash__(self): + return hash("BatchUpdateEntitySignalsMappings") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse: + r"""Call the batch update entity + signals mappings method over HTTP. + + Args: + request (~.entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest): + The request object. Request object for ``BatchUpdateEntitySignalsMappings`` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse: + Response object for ``BatchUpdateEntitySignalsMappings`` + method. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=networks/*}/entitySignalsMappings:batchUpdate", + "body": "*", + }, + ] + ( + request, + metadata, + ) = self._interceptor.pre_batch_update_entity_signals_mappings( + request, metadata + ) + pb_request = entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = ( + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse() + ) + pb_resp = entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse.pb( + resp + ) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_update_entity_signals_mappings(resp) + return resp + + class _CreateEntitySignalsMapping(EntitySignalsMappingServiceRestStub): + def __hash__(self): + return hash("CreateEntitySignalsMapping") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: entity_signals_mapping_service.CreateEntitySignalsMappingRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> entity_signals_mapping_messages.EntitySignalsMapping: + r"""Call the create entity signals + mapping method over HTTP. + + Args: + request (~.entity_signals_mapping_service.CreateEntitySignalsMappingRequest): + The request object. Request object for + 'CreateEntitySignalsMapping' method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.entity_signals_mapping_messages.EntitySignalsMapping: + The ``EntitySignalsMapping`` resource. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=networks/*}/entitySignalsMappings", + "body": "entity_signals_mapping", + }, + ] + request, metadata = self._interceptor.pre_create_entity_signals_mapping( + request, metadata + ) + pb_request = ( + entity_signals_mapping_service.CreateEntitySignalsMappingRequest.pb( + request + ) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = entity_signals_mapping_messages.EntitySignalsMapping() + pb_resp = entity_signals_mapping_messages.EntitySignalsMapping.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_entity_signals_mapping(resp) + return resp + + class _GetEntitySignalsMapping(EntitySignalsMappingServiceRestStub): + def __hash__(self): + return hash("GetEntitySignalsMapping") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: entity_signals_mapping_service.GetEntitySignalsMappingRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> entity_signals_mapping_messages.EntitySignalsMapping: + r"""Call the get entity signals + mapping method over HTTP. + + Args: + request (~.entity_signals_mapping_service.GetEntitySignalsMappingRequest): + The request object. Request object for ``GetEntitySignalsMapping`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.entity_signals_mapping_messages.EntitySignalsMapping: + The ``EntitySignalsMapping`` resource. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=networks/*/entitySignalsMappings/*}", + }, + ] + request, metadata = self._interceptor.pre_get_entity_signals_mapping( + request, metadata + ) + pb_request = ( + entity_signals_mapping_service.GetEntitySignalsMappingRequest.pb( + request + ) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = entity_signals_mapping_messages.EntitySignalsMapping() + pb_resp = entity_signals_mapping_messages.EntitySignalsMapping.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_entity_signals_mapping(resp) + return resp + + class _ListEntitySignalsMappings(EntitySignalsMappingServiceRestStub): + def __hash__(self): + return hash("ListEntitySignalsMappings") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: entity_signals_mapping_service.ListEntitySignalsMappingsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> entity_signals_mapping_service.ListEntitySignalsMappingsResponse: + r"""Call the list entity signals + mappings method over HTTP. + + Args: + request (~.entity_signals_mapping_service.ListEntitySignalsMappingsRequest): + The request object. Request object for ``ListEntitySignalsMappings`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.entity_signals_mapping_service.ListEntitySignalsMappingsResponse: + Response object for ``ListEntitySignalsMappingsRequest`` + containing matching ``EntitySignalsMapping`` resources. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=networks/*}/entitySignalsMappings", + }, + ] + request, metadata = self._interceptor.pre_list_entity_signals_mappings( + request, metadata + ) + pb_request = ( + entity_signals_mapping_service.ListEntitySignalsMappingsRequest.pb( + request + ) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = entity_signals_mapping_service.ListEntitySignalsMappingsResponse() + pb_resp = ( + entity_signals_mapping_service.ListEntitySignalsMappingsResponse.pb( + resp + ) + ) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_entity_signals_mappings(resp) + return resp + + class _UpdateEntitySignalsMapping(EntitySignalsMappingServiceRestStub): + def __hash__(self): + return hash("UpdateEntitySignalsMapping") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: entity_signals_mapping_service.UpdateEntitySignalsMappingRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> entity_signals_mapping_messages.EntitySignalsMapping: + r"""Call the update entity signals + mapping method over HTTP. + + Args: + request (~.entity_signals_mapping_service.UpdateEntitySignalsMappingRequest): + The request object. Request object for + 'UpdateEntitySignalsMapping' method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.entity_signals_mapping_messages.EntitySignalsMapping: + The ``EntitySignalsMapping`` resource. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{entity_signals_mapping.name=networks/*/entitySignalsMappings/*}", + "body": "entity_signals_mapping", + }, + ] + request, metadata = self._interceptor.pre_update_entity_signals_mapping( + request, metadata + ) + pb_request = ( + entity_signals_mapping_service.UpdateEntitySignalsMappingRequest.pb( + request + ) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = entity_signals_mapping_messages.EntitySignalsMapping() + pb_resp = entity_signals_mapping_messages.EntitySignalsMapping.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_entity_signals_mapping(resp) + return resp + + @property + def batch_create_entity_signals_mappings( + self, + ) -> Callable[ + [entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest], + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchCreateEntitySignalsMappings(self._session, self._host, self._interceptor) # type: ignore + + @property + def batch_update_entity_signals_mappings( + self, + ) -> Callable[ + [entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest], + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchUpdateEntitySignalsMappings(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_entity_signals_mapping( + self, + ) -> Callable[ + [entity_signals_mapping_service.CreateEntitySignalsMappingRequest], + entity_signals_mapping_messages.EntitySignalsMapping, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateEntitySignalsMapping(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_entity_signals_mapping( + self, + ) -> Callable[ + [entity_signals_mapping_service.GetEntitySignalsMappingRequest], + entity_signals_mapping_messages.EntitySignalsMapping, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetEntitySignalsMapping(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_entity_signals_mappings( + self, + ) -> Callable[ + [entity_signals_mapping_service.ListEntitySignalsMappingsRequest], + entity_signals_mapping_service.ListEntitySignalsMappingsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListEntitySignalsMappings(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_entity_signals_mapping( + self, + ) -> Callable[ + [entity_signals_mapping_service.UpdateEntitySignalsMappingRequest], + entity_signals_mapping_messages.EntitySignalsMapping, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateEntitySignalsMapping(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(EntitySignalsMappingServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. 
+ + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=networks/*/operations/reports/runs/*}", + }, + { + "method": "get", + "uri": "/v1/{name=networks/*/operations/reports/exports/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("EntitySignalsMappingServiceRestTransport",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/pagers.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/pagers.py deleted file mode 100644 index ba3ba0c00821..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/pagers.py +++ /dev/null @@ -1,115 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from typing import ( - Any, - AsyncIterator, - Awaitable, - Callable, - Iterator, - Optional, - Sequence, - Tuple, - Union, -) - -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[ - retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None - ] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.ads.admanager_v1.types import label_service - - -class ListLabelsPager: - """A pager for iterating through ``list_labels`` requests. - - This class thinly wraps an initial - :class:`google.ads.admanager_v1.types.ListLabelsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``labels`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListLabels`` requests and continue to iterate - through the ``labels`` field on the - corresponding responses. - - All the usual :class:`google.ads.admanager_v1.types.ListLabelsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - - def __init__( - self, - method: Callable[..., label_service.ListLabelsResponse], - request: label_service.ListLabelsRequest, - response: label_service.ListLabelsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.ads.admanager_v1.types.ListLabelsRequest): - The initial request object. 
- response (google.ads.admanager_v1.types.ListLabelsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = label_service.ListLabelsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[label_service.ListLabelsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __iter__(self) -> Iterator[label_service.Label]: - for page in self.pages: - yield from page.labels - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/transports/rest.py deleted file mode 100644 index 3bcda39fe9ab..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/transports/rest.py +++ /dev/null @@ -1,520 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -import dataclasses -import json # type: ignore -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - -from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.protobuf import json_format -import grpc # type: ignore -from requests import __version__ as requests_version - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - - -from google.longrunning import operations_pb2 # type: ignore - -from google.ads.admanager_v1.types import label_service - -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO -from .base import LabelServiceTransport - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=requests_version, -) - - -class LabelServiceRestInterceptor: - """Interceptor for LabelService. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. 
- Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the LabelServiceRestTransport. - - .. code-block:: python - class MyCustomLabelServiceInterceptor(LabelServiceRestInterceptor): - def pre_get_label(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_label(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_labels(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_labels(self, response): - logging.log(f"Received response: {response}") - return response - - transport = LabelServiceRestTransport(interceptor=MyCustomLabelServiceInterceptor()) - client = LabelServiceClient(transport=transport) - - - """ - - def pre_get_label( - self, - request: label_service.GetLabelRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[label_service.GetLabelRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_label - - Override in a subclass to manipulate the request or metadata - before they are sent to the LabelService server. - """ - return request, metadata - - def post_get_label(self, response: label_service.Label) -> label_service.Label: - """Post-rpc interceptor for get_label - - Override in a subclass to manipulate the response - after it is returned by the LabelService server but before - it is returned to user code. 
- """ - return response - - def pre_list_labels( - self, - request: label_service.ListLabelsRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[label_service.ListLabelsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_labels - - Override in a subclass to manipulate the request or metadata - before they are sent to the LabelService server. - """ - return request, metadata - - def post_list_labels( - self, response: label_service.ListLabelsResponse - ) -> label_service.ListLabelsResponse: - """Post-rpc interceptor for list_labels - - Override in a subclass to manipulate the response - after it is returned by the LabelService server but before - it is returned to user code. - """ - return response - - def pre_get_operation( - self, - request: operations_pb2.GetOperationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the LabelService server. - """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the LabelService server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class LabelServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: LabelServiceRestInterceptor - - -class LabelServiceRestTransport(LabelServiceTransport): - """REST backend transport for LabelService. - - Provides methods for handling Label objects. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
- - It sends JSON representations of protocol buffers over HTTP/1.1 - - """ - - def __init__( - self, - *, - host: str = "admanager.googleapis.com", - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = "https", - interceptor: Optional[LabelServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'admanager.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. 
- always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. - # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError( - f"Unexpected hostname structure: {host}" - ) # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST - ) - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or LabelServiceRestInterceptor() - self._prep_wrapped_messages(client_info) - - class _GetLabel(LabelServiceRestStub): - def __hash__(self): - return hash("GetLabel") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - def __call__( - self, - request: label_service.GetLabelRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> label_service.Label: - r"""Call the get label method over HTTP. - - Args: - request (~.label_service.GetLabelRequest): - The request object. Request object for GetLabel method. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.label_service.Label: - The Label resource. - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=networks/*/labels/*}", - }, - ] - request, metadata = self._interceptor.pre_get_label(request, metadata) - pb_request = label_service.GetLabelRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = label_service.Label() - pb_resp = label_service.Label.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_label(resp) - return resp - - class _ListLabels(LabelServiceRestStub): - def __hash__(self): - return hash("ListLabels") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - def __call__( - self, - request: label_service.ListLabelsRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> label_service.ListLabelsResponse: - r"""Call the list labels method over HTTP. - - Args: - request (~.label_service.ListLabelsRequest): - The request object. Request object for ListLabels method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.label_service.ListLabelsResponse: - Response object for ListLabelsRequest - containing matching Label resources. 
- - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{parent=networks/*}/labels", - }, - ] - request, metadata = self._interceptor.pre_list_labels(request, metadata) - pb_request = label_service.ListLabelsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = label_service.ListLabelsResponse() - pb_resp = label_service.ListLabelsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_labels(resp) - return resp - - @property - def get_label( - self, - ) -> Callable[[label_service.GetLabelRequest], label_service.Label]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._GetLabel(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_labels( - self, - ) -> Callable[[label_service.ListLabelsRequest], label_service.ListLabelsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListLabels(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(LabelServiceRestStub): - def __call__( - self, - request: operations_pb2.GetOperationRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - operations_pb2.Operation: Response from GetOperation method. 
- """ - - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/exports/*}", - }, - { - "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/runs/*}", - }, - ] - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = operations_pb2.Operation() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_get_operation(resp) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__ = ("LabelServiceRestTransport",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/line_item_service/__init__.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/line_item_service/__init__.py deleted file mode 100644 index 7efb117bbdc9..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/line_item_service/__init__.py +++ /dev/null @@ -1,18 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .client import LineItemServiceClient - -__all__ = ("LineItemServiceClient",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/line_item_service/pagers.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/line_item_service/pagers.py deleted file mode 100644 index 7d60d7683956..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/line_item_service/pagers.py +++ /dev/null @@ -1,115 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import ( - Any, - AsyncIterator, - Awaitable, - Callable, - Iterator, - Optional, - Sequence, - Tuple, - Union, -) - -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[ - retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None - ] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.ads.admanager_v1.types import line_item_service - - -class ListLineItemsPager: - """A pager for iterating through ``list_line_items`` requests. 
- - This class thinly wraps an initial - :class:`google.ads.admanager_v1.types.ListLineItemsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``line_items`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListLineItems`` requests and continue to iterate - through the ``line_items`` field on the - corresponding responses. - - All the usual :class:`google.ads.admanager_v1.types.ListLineItemsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - - def __init__( - self, - method: Callable[..., line_item_service.ListLineItemsResponse], - request: line_item_service.ListLineItemsRequest, - response: line_item_service.ListLineItemsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.ads.admanager_v1.types.ListLineItemsRequest): - The initial request object. - response (google.ads.admanager_v1.types.ListLineItemsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = line_item_service.ListLineItemsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[line_item_service.ListLineItemsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __iter__(self) -> Iterator[line_item_service.LineItem]: - for page in self.pages: - yield from page.line_items - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/line_item_service/transports/__init__.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/line_item_service/transports/__init__.py deleted file mode 100644 index a187611acc17..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/line_item_service/transports/__init__.py +++ /dev/null @@ -1,30 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -from typing import Dict, Type - -from .base import LineItemServiceTransport -from .rest import LineItemServiceRestInterceptor, LineItemServiceRestTransport - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[LineItemServiceTransport]] -_transport_registry["rest"] = LineItemServiceRestTransport - -__all__ = ( - "LineItemServiceTransport", - "LineItemServiceRestTransport", - "LineItemServiceRestInterceptor", -) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/line_item_service/transports/base.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/line_item_service/transports/base.py deleted file mode 100644 index c893d9789ad1..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/line_item_service/transports/base.py +++ /dev/null @@ -1,188 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.ads.admanager_v1 import gapic_version as package_version -from google.ads.admanager_v1.types import line_item_service - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=package_version.__version__ -) - - -class LineItemServiceTransport(abc.ABC): - """Abstract transport class for LineItemService.""" - - AUTH_SCOPES = () - - DEFAULT_HOST: str = "admanager.googleapis.com" - - def __init__( - self, - *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'admanager.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. 
- quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs( - "'credentials_file' and 'credentials' are mutually exclusive" - ) - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, **scopes_kwargs, quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default( - **scopes_kwargs, quota_project_id=quota_project_id - ) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience( - api_audience if api_audience else host - ) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if ( - always_use_jwt_access - and isinstance(credentials, service_account.Credentials) - and hasattr(service_account.Credentials, "with_always_use_jwt_access") - ): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
- if ":" not in host: - host += ":443" - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.get_line_item: gapic_v1.method.wrap_method( - self.get_line_item, - default_timeout=None, - client_info=client_info, - ), - self.list_line_items: gapic_v1.method.wrap_method( - self.list_line_items, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! - """ - raise NotImplementedError() - - @property - def get_line_item( - self, - ) -> Callable[ - [line_item_service.GetLineItemRequest], - Union[line_item_service.LineItem, Awaitable[line_item_service.LineItem]], - ]: - raise NotImplementedError() - - @property - def list_line_items( - self, - ) -> Callable[ - [line_item_service.ListLineItemsRequest], - Union[ - line_item_service.ListLineItemsResponse, - Awaitable[line_item_service.ListLineItemsResponse], - ], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ("LineItemServiceTransport",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/network_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/network_service/client.py index 6c6cc693983d..b863f446461e 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/network_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/network_service/client.py @@ -50,7 +50,7 @@ from google.longrunning import 
operations_pb2 # type: ignore -from google.ads.admanager_v1.types import network_service +from google.ads.admanager_v1.types import network_messages, network_service from .transports.base import DEFAULT_CLIENT_INFO, NetworkServiceTransport from .transports.rest import NetworkServiceRestTransport @@ -692,7 +692,7 @@ def get_network( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> network_service.Network: + ) -> network_messages.Network: r"""API to retrieve a Network object. .. code-block:: python @@ -784,6 +784,79 @@ def sample_get_network(): # Done; return the response. return response + def list_networks( + self, + request: Optional[Union[network_service.ListNetworksRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> network_service.ListNetworksResponse: + r"""API to retrieve all the networks the current user has + access to. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import admanager_v1 + + def sample_list_networks(): + # Create a client + client = admanager_v1.NetworkServiceClient() + + # Initialize request argument(s) + request = admanager_v1.ListNetworksRequest( + ) + + # Make the request + response = client.list_networks(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ads.admanager_v1.types.ListNetworksRequest, dict]): + The request object. 
Request object for ``ListNetworks`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.admanager_v1.types.ListNetworksResponse: + Response object for ListNetworks method. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, network_service.ListNetworksRequest): + request = network_service.ListNetworksRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_networks] + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def __enter__(self) -> "NetworkServiceClient": return self diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/network_service/transports/base.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/network_service/transports/base.py index ae9d00df7932..6f0ecf9e8ef9 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/network_service/transports/base.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/network_service/transports/base.py @@ -26,7 +26,7 @@ from google.oauth2 import service_account # type: ignore from google.ads.admanager_v1 import gapic_version as package_version -from google.ads.admanager_v1.types import network_service +from google.ads.admanager_v1.types import network_messages, network_service DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -134,6 +134,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.list_networks: gapic_v1.method.wrap_method( + self.list_networks, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -150,7 +155,19 @@ def get_network( self, ) -> Callable[ [network_service.GetNetworkRequest], - Union[network_service.Network, Awaitable[network_service.Network]], + Union[network_messages.Network, Awaitable[network_messages.Network]], + ]: + raise NotImplementedError() + + @property + def list_networks( + self, + ) -> Callable[ + [network_service.ListNetworksRequest], + Union[ + network_service.ListNetworksResponse, + Awaitable[network_service.ListNetworksResponse], + ], ]: raise NotImplementedError() diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/network_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/network_service/transports/rest.py index 0bb7e4dcf9de..e5165660eb1c 100644 --- 
a/packages/google-ads-admanager/google/ads/admanager_v1/services/network_service/transports/rest.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/network_service/transports/rest.py @@ -38,7 +38,7 @@ from google.longrunning import operations_pb2 # type: ignore -from google.ads.admanager_v1.types import network_service +from google.ads.admanager_v1.types import network_messages, network_service from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO from .base import NetworkServiceTransport @@ -73,6 +73,14 @@ def post_get_network(self, response): logging.log(f"Received response: {response}") return response + def pre_list_networks(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_networks(self, response): + logging.log(f"Received response: {response}") + return response + transport = NetworkServiceRestTransport(interceptor=MyCustomNetworkServiceInterceptor()) client = NetworkServiceClient(transport=transport) @@ -92,8 +100,8 @@ def pre_get_network( return request, metadata def post_get_network( - self, response: network_service.Network - ) -> network_service.Network: + self, response: network_messages.Network + ) -> network_messages.Network: """Post-rpc interceptor for get_network Override in a subclass to manipulate the response @@ -102,6 +110,29 @@ def post_get_network( """ return response + def pre_list_networks( + self, + request: network_service.ListNetworksRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[network_service.ListNetworksRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_networks + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkService server. 
+ """ + return request, metadata + + def post_list_networks( + self, response: network_service.ListNetworksResponse + ) -> network_service.ListNetworksResponse: + """Post-rpc interceptor for list_networks + + Override in a subclass to manipulate the response + after it is returned by the NetworkService server but before + it is returned to user code. + """ + return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -243,7 +274,7 @@ def __call__( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> network_service.Network: + ) -> network_messages.Network: r"""Call the get network method over HTTP. Args: @@ -256,7 +287,7 @@ def __call__( sent along with the request as metadata. Returns: - ~.network_service.Network: + ~.network_messages.Network: The Network resource. """ @@ -300,21 +331,105 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = network_service.Network() - pb_resp = network_service.Network.pb(resp) + resp = network_messages.Network() + pb_resp = network_messages.Network.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_network(resp) return resp + class _ListNetworks(NetworkServiceRestStub): + def __hash__(self): + return hash("ListNetworks") + + def __call__( + self, + request: network_service.ListNetworksRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> network_service.ListNetworksResponse: + r"""Call the list networks method over HTTP. + + Args: + request (~.network_service.ListNetworksRequest): + The request object. Request object for ``ListNetworks`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.network_service.ListNetworksResponse: + Response object for ``ListNetworks`` method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/networks", + }, + ] + request, metadata = self._interceptor.pre_list_networks(request, metadata) + pb_request = network_service.ListNetworksRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = network_service.ListNetworksResponse() + pb_resp = network_service.ListNetworksResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_networks(resp) + return resp + @property def get_network( self, - ) -> Callable[[network_service.GetNetworkRequest], network_service.Network]: + ) -> Callable[[network_service.GetNetworkRequest], network_messages.Network]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast return self._GetNetwork(self._session, self._host, self._interceptor) # type: ignore + @property + def list_networks( + self, + ) -> Callable[ + [network_service.ListNetworksRequest], network_service.ListNetworksResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListNetworks(self._session, self._host, self._interceptor) # type: ignore + @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore @@ -346,11 +461,11 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/exports/*}", + "uri": "/v1/{name=networks/*/operations/reports/runs/*}", }, { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/runs/*}", + "uri": "/v1/{name=networks/*/operations/reports/exports/*}", }, ] diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/client.py index 98c15bafc0f8..018b9add869d 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/client.py @@ -52,7 +52,13 @@ from google.protobuf import timestamp_pb2 # type: ignore from google.ads.admanager_v1.services.order_service import pagers -from google.ads.admanager_v1.types import applied_label, order_service +from google.ads.admanager_v1.types import ( + applied_label, + custom_field_value, + order_enums, + order_messages, + order_service, +) from .transports.base import DEFAULT_CLIENT_INFO, OrderServiceTransport from .transports.rest import OrderServiceRestTransport @@ -218,6 +224,25 @@ def parse_contact_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + 
@staticmethod + def custom_field_path( + network_code: str, + custom_field: str, + ) -> str: + """Returns a fully-qualified custom_field string.""" + return "networks/{network_code}/customFields/{custom_field}".format( + network_code=network_code, + custom_field=custom_field, + ) + + @staticmethod + def parse_custom_field_path(path: str) -> Dict[str, str]: + """Parses a custom_field path into its component segments.""" + m = re.match( + r"^networks/(?P.+?)/customFields/(?P.+?)$", path + ) + return m.groupdict() if m else {} + @staticmethod def label_path( network_code: str, @@ -778,7 +803,7 @@ def get_order( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> order_service.Order: + ) -> order_messages.Order: r"""API to retrieve an Order object. .. code-block:: python diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/pagers.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/pagers.py index e4a153d12691..6990818382ad 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/pagers.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/pagers.py @@ -38,7 +38,7 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.ads.admanager_v1.types import order_service +from google.ads.admanager_v1.types import order_messages, order_service class ListOrdersPager: @@ -107,7 +107,7 @@ def pages(self) -> Iterator[order_service.ListOrdersResponse]: ) yield self._response - def __iter__(self) -> Iterator[order_service.Order]: + def __iter__(self) -> Iterator[order_messages.Order]: for page in self.pages: yield from page.orders diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/transports/base.py 
b/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/transports/base.py index 1c19f3691180..f2a3399ce763 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/transports/base.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/transports/base.py @@ -26,7 +26,7 @@ from google.oauth2 import service_account # type: ignore from google.ads.admanager_v1 import gapic_version as package_version -from google.ads.admanager_v1.types import order_service +from google.ads.admanager_v1.types import order_messages, order_service DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -155,7 +155,7 @@ def get_order( self, ) -> Callable[ [order_service.GetOrderRequest], - Union[order_service.Order, Awaitable[order_service.Order]], + Union[order_messages.Order, Awaitable[order_messages.Order]], ]: raise NotImplementedError() diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/transports/rest.py index bbcd76b93d43..ee56c744e7a9 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/transports/rest.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/transports/rest.py @@ -38,7 +38,7 @@ from google.longrunning import operations_pb2 # type: ignore -from google.ads.admanager_v1.types import order_service +from google.ads.admanager_v1.types import order_messages, order_service from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO from .base import OrderServiceTransport @@ -99,7 +99,7 @@ def pre_get_order( """ return request, metadata - def post_get_order(self, response: order_service.Order) -> order_service.Order: + def post_get_order(self, response: order_messages.Order) -> order_messages.Order: """Post-rpc interceptor for get_order 
Override in a subclass to manipulate the response @@ -272,7 +272,7 @@ def __call__( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> order_service.Order: + ) -> order_messages.Order: r"""Call the get order method over HTTP. Args: @@ -285,7 +285,7 @@ def __call__( sent along with the request as metadata. Returns: - ~.order_service.Order: + ~.order_messages.Order: The ``Order`` resource. """ @@ -329,8 +329,8 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = order_service.Order() - pb_resp = order_service.Order.pb(resp) + resp = order_messages.Order() + pb_resp = order_messages.Order.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_order(resp) @@ -426,7 +426,7 @@ def __call__( @property def get_order( self, - ) -> Callable[[order_service.GetOrderRequest], order_service.Order]: + ) -> Callable[[order_service.GetOrderRequest], order_messages.Order]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast return self._GetOrder(self._session, self._host, self._interceptor) # type: ignore @@ -470,11 +470,11 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/exports/*}", + "uri": "/v1/{name=networks/*/operations/reports/runs/*}", }, { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/runs/*}", + "uri": "/v1/{name=networks/*/operations/reports/exports/*}", }, ] diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/client.py index 313b200e4c3d..0b662d3c0e41 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/client.py @@ -52,7 +52,11 @@ from google.protobuf import timestamp_pb2 # type: ignore from google.ads.admanager_v1.services.placement_service import pagers -from google.ads.admanager_v1.types import placement_enums, placement_service +from google.ads.admanager_v1.types import ( + placement_enums, + placement_messages, + placement_service, +) from .transports.base import DEFAULT_CLIENT_INFO, PlacementServiceTransport from .transports.rest import PlacementServiceRestTransport @@ -716,7 +720,7 @@ def get_placement( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> placement_service.Placement: + ) -> placement_messages.Placement: r"""API to retrieve a ``Placement`` object. .. 
code-block:: python diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/pagers.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/pagers.py index cff0babfac9a..8dfe5d886b93 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/pagers.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/pagers.py @@ -38,7 +38,7 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.ads.admanager_v1.types import placement_service +from google.ads.admanager_v1.types import placement_messages, placement_service class ListPlacementsPager: @@ -107,7 +107,7 @@ def pages(self) -> Iterator[placement_service.ListPlacementsResponse]: ) yield self._response - def __iter__(self) -> Iterator[placement_service.Placement]: + def __iter__(self) -> Iterator[placement_messages.Placement]: for page in self.pages: yield from page.placements diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/transports/base.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/transports/base.py index 0012313340aa..5a18c192fe76 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/transports/base.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/transports/base.py @@ -26,7 +26,7 @@ from google.oauth2 import service_account # type: ignore from google.ads.admanager_v1 import gapic_version as package_version -from google.ads.admanager_v1.types import placement_service +from google.ads.admanager_v1.types import placement_messages, placement_service DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -155,7 +155,7 @@ def get_placement( self, ) -> Callable[ 
[placement_service.GetPlacementRequest], - Union[placement_service.Placement, Awaitable[placement_service.Placement]], + Union[placement_messages.Placement, Awaitable[placement_messages.Placement]], ]: raise NotImplementedError() diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/transports/rest.py index 319d89f6a961..65d93682cb02 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/transports/rest.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/transports/rest.py @@ -38,7 +38,7 @@ from google.longrunning import operations_pb2 # type: ignore -from google.ads.admanager_v1.types import placement_service +from google.ads.admanager_v1.types import placement_messages, placement_service from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO from .base import PlacementServiceTransport @@ -100,8 +100,8 @@ def pre_get_placement( return request, metadata def post_get_placement( - self, response: placement_service.Placement - ) -> placement_service.Placement: + self, response: placement_messages.Placement + ) -> placement_messages.Placement: """Post-rpc interceptor for get_placement Override in a subclass to manipulate the response @@ -274,7 +274,7 @@ def __call__( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> placement_service.Placement: + ) -> placement_messages.Placement: r"""Call the get placement method over HTTP. Args: @@ -287,7 +287,7 @@ def __call__( sent along with the request as metadata. Returns: - ~.placement_service.Placement: + ~.placement_messages.Placement: The ``Placement`` resource. 
""" @@ -331,8 +331,8 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = placement_service.Placement() - pb_resp = placement_service.Placement.pb(resp) + resp = placement_messages.Placement() + pb_resp = placement_messages.Placement.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_placement(resp) @@ -428,7 +428,9 @@ def __call__( @property def get_placement( self, - ) -> Callable[[placement_service.GetPlacementRequest], placement_service.Placement]: + ) -> Callable[ + [placement_service.GetPlacementRequest], placement_messages.Placement + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast return self._GetPlacement(self._session, self._host, self._interceptor) # type: ignore @@ -475,11 +477,11 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/exports/*}", + "uri": "/v1/{name=networks/*/operations/reports/runs/*}", }, { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/runs/*}", + "uri": "/v1/{name=networks/*/operations/reports/exports/*}", }, ] diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/client.py index 90e0b153f8af..ebe8fb88cd0d 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/client.py @@ -51,7 +51,10 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from 
google.ads.admanager_v1.services.report_service import pagers from google.ads.admanager_v1.types import report_service from .transports.base import DEFAULT_CLIENT_INFO, ReportServiceTransport @@ -92,7 +95,7 @@ def get_transport_class( class ReportServiceClient(metaclass=ReportServiceClientMeta): - """Provides methods for interacting with Reports.""" + """Provides methods for interacting with reports.""" @staticmethod def _get_default_mtls_endpoint(api_endpoint): @@ -180,6 +183,21 @@ def transport(self) -> ReportServiceTransport: """ return self._transport + @staticmethod + def network_path( + network_code: str, + ) -> str: + """Returns a fully-qualified network string.""" + return "networks/{network_code}".format( + network_code=network_code, + ) + + @staticmethod + def parse_network_path(path: str) -> Dict[str, str]: + """Parses a network path into its component segments.""" + m = re.match(r"^networks/(?P.+?)$", path) + return m.groupdict() if m else {} + @staticmethod def report_path( network_code: str, @@ -669,22 +687,16 @@ def __init__( api_audience=self._client_options.api_audience, ) - def export_saved_report( + def get_report( self, - request: Optional[Union[report_service.ExportSavedReportRequest, dict]] = None, + request: Optional[Union[report_service.GetReportRequest, dict]] = None, *, - report: Optional[str] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Initiates the execution and export of a report - asynchronously. Users can get the report by polling this - operation via OperationsService.GetOperation. - Intervals of at least 2 seconds are recommended, with an - exponential backoff. Once a report is complete, the - operation will contain a ExportSavedReportResponse in - its response field. + ) -> report_service.Report: + r"""API to retrieve a ``Report`` object. .. 
code-block:: python @@ -697,39 +709,489 @@ def export_saved_report( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.ads import admanager_v1 - def sample_export_saved_report(): + def sample_get_report(): # Create a client client = admanager_v1.ReportServiceClient() # Initialize request argument(s) - request = admanager_v1.ExportSavedReportRequest( - format_="XML", + request = admanager_v1.GetReportRequest( + name="name_value", ) # Make the request - operation = client.export_saved_report(request=request) + response = client.get_report(request=request) - print("Waiting for operation to complete...") + # Handle the response + print(response) - response = operation.result() + Args: + request (Union[google.ads.admanager_v1.types.GetReportRequest, dict]): + The request object. Request object for ``GetReport`` method. + name (str): + Required. The resource name of the report. Format: + ``networks/{network_code}/reports/{report_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.admanager_v1.types.Report: + The Report resource. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, report_service.GetReportRequest): + request = report_service.GetReportRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_report] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_reports( + self, + request: Optional[Union[report_service.ListReportsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListReportsPager: + r"""API to retrieve a list of ``Report`` objects. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import admanager_v1 + + def sample_list_reports(): + # Create a client + client = admanager_v1.ReportServiceClient() + + # Initialize request argument(s) + request = admanager_v1.ListReportsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_reports(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.ads.admanager_v1.types.ListReportsRequest, dict]): + The request object. Request object for ``ListReports`` method. + parent (str): + Required. The parent, which owns this collection of + reports. Format: ``networks/{network_code}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.admanager_v1.services.report_service.pagers.ListReportsPager: + Response object for ListReportsResponse containing matching Report + objects. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, report_service.ListReportsRequest): + request = report_service.ListReportsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_reports] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListReportsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_report( + self, + request: Optional[Union[report_service.CreateReportRequest, dict]] = None, + *, + parent: Optional[str] = None, + report: Optional[report_service.Report] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> report_service.Report: + r"""API to create a ``Report`` object. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import admanager_v1 + + def sample_create_report(): + # Create a client + client = admanager_v1.ReportServiceClient() + + # Initialize request argument(s) + report = admanager_v1.Report() + report.report_definition.dimensions = ['CUSTOM_DIMENSION_9_VALUE'] + report.report_definition.metrics = ['YIELD_GROUP_MEDIATION_THIRD_PARTY_ECPM'] + report.report_definition.report_type = "HISTORICAL" + + request = admanager_v1.CreateReportRequest( + parent="parent_value", + report=report, + ) + + # Make the request + response = client.create_report(request=request) # Handle the response print(response) Args: - request (Union[google.ads.admanager_v1.types.ExportSavedReportRequest, dict]): - The request object. Request proto for the configuration - of a report run. - report (str): - The name of a particular saved report resource. + request (Union[google.ads.admanager_v1.types.CreateReportRequest, dict]): + The request object. Request object for ``CreateReport`` method. + parent (str): + Required. The parent resource where this ``Report`` will + be created. Format: ``networks/{network_code}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + report (google.ads.admanager_v1.types.Report): + Required. The ``Report`` to create. + This corresponds to the ``report`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.admanager_v1.types.Report: + The Report resource. 
+ """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, report]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, report_service.CreateReportRequest): + request = report_service.CreateReportRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if report is not None: + request.report = report - A report will be run based on the specification of this - saved report. It must have the format of - "networks/{network_code}/reports/{report_id}" + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_report] + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def update_report( + self, + request: Optional[Union[report_service.UpdateReportRequest, dict]] = None, + *, + report: Optional[report_service.Report] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> report_service.Report: + r"""API to update a ``Report`` object. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import admanager_v1 + + def sample_update_report(): + # Create a client + client = admanager_v1.ReportServiceClient() + + # Initialize request argument(s) + report = admanager_v1.Report() + report.report_definition.dimensions = ['CUSTOM_DIMENSION_9_VALUE'] + report.report_definition.metrics = ['YIELD_GROUP_MEDIATION_THIRD_PARTY_ECPM'] + report.report_definition.report_type = "HISTORICAL" + + request = admanager_v1.UpdateReportRequest( + report=report, + ) + + # Make the request + response = client.update_report(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ads.admanager_v1.types.UpdateReportRequest, dict]): + The request object. Request object for ``UpdateReport`` method. + report (google.ads.admanager_v1.types.Report): + Required. The ``Report`` to update. This corresponds to the ``report`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to + update. 
+ + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.admanager_v1.types.Report: + The Report resource. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([report, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, report_service.UpdateReportRequest): + request = report_service.UpdateReportRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if report is not None: + request.report = report + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_report] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("report.name", request.report.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def run_report( + self, + request: Optional[Union[report_service.RunReportRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Initiates the execution of an existing report asynchronously. + Users can get the report by polling this operation via + ``OperationsService.GetOperation``. Poll every 5 seconds + initially, with an exponential backoff. Once a report is + complete, the operation will contain a ``RunReportResponse`` in + its response field containing a report_result that can be passed + to the ``FetchReportResultRows`` method to retrieve the report + data. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import admanager_v1 + + def sample_run_report(): + # Create a client + client = admanager_v1.ReportServiceClient() + + # Initialize request argument(s) + request = admanager_v1.RunReportRequest( + name="name_value", + ) + + # Make the request + operation = client.run_report(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.ads.admanager_v1.types.RunReportRequest, dict]): + The request object. Request message for a running a + report. + name (str): + Required. The report to run. 
Format: + ``networks/{network_code}/reports/{report_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -740,14 +1202,15 @@ def sample_export_saved_report(): google.api_core.operation.Operation: An object representing a long-running operation. - The result type for the operation will be :class:`google.ads.admanager_v1.types.ExportSavedReportResponse` Message included in the longrunning Operation result.response field when - the report completes successfully. + The result type for the operation will be + :class:`google.ads.admanager_v1.types.RunReportResponse` + Response message for a completed RunReport operation. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([report]) + has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -756,21 +1219,21 @@ def sample_export_saved_report(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, report_service.ExportSavedReportRequest): - request = report_service.ExportSavedReportRequest(request) + if not isinstance(request, report_service.RunReportRequest): + request = report_service.RunReportRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if report is not None: - request.report = report + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.export_saved_report] + rpc = self._transport._wrapped_methods[self._transport.run_report] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("report", request.report),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -788,8 +1251,131 @@ def sample_export_saved_report(): response = operation.from_gapic( response, self._transport.operations_client, - report_service.ExportSavedReportResponse, - metadata_type=report_service.ExportSavedReportMetadata, + report_service.RunReportResponse, + metadata_type=report_service.RunReportMetadata, + ) + + # Done; return the response. + return response + + def fetch_report_result_rows( + self, + request: Optional[ + Union[report_service.FetchReportResultRowsRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.FetchReportResultRowsPager: + r"""Returns the result rows from a completed report. The caller must + have previously called ``RunReport`` and waited for that + operation to complete. The rows will be returned according to + the order specified by the ``sorts`` member of the report + definition. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import admanager_v1 + + def sample_fetch_report_result_rows(): + # Create a client + client = admanager_v1.ReportServiceClient() + + # Initialize request argument(s) + request = admanager_v1.FetchReportResultRowsRequest( + ) + + # Make the request + page_result = client.fetch_report_result_rows(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.ads.admanager_v1.types.FetchReportResultRowsRequest, dict]): + The request object. The request message for the fetch + report result rows endpoint. + name (str): + The report result being fetched. Format: + ``networks/{network_code}/reports/{report_id}/results/{report_result_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.admanager_v1.services.report_service.pagers.FetchReportResultRowsPager: + The response message for the fetch + report result rows endpoint. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, report_service.FetchReportResultRowsRequest): + request = report_service.FetchReportResultRowsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.fetch_report_result_rows] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.FetchReportResultRowsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, ) # Done; return the response. diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/pagers.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/pagers.py new file mode 100644 index 000000000000..5299974427f5 --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/pagers.py @@ -0,0 +1,189 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.ads.admanager_v1.types import report_service + + +class ListReportsPager: + """A pager for iterating through ``list_reports`` requests. + + This class thinly wraps an initial + :class:`google.ads.admanager_v1.types.ListReportsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``reports`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListReports`` requests and continue to iterate + through the ``reports`` field on the + corresponding responses. + + All the usual :class:`google.ads.admanager_v1.types.ListReportsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., report_service.ListReportsResponse], + request: report_service.ListReportsRequest, + response: report_service.ListReportsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ads.admanager_v1.types.ListReportsRequest): + The initial request object. + response (google.ads.admanager_v1.types.ListReportsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = report_service.ListReportsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[report_service.ListReportsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[report_service.Report]: + for page in self.pages: + yield from page.reports + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class FetchReportResultRowsPager: + """A pager for iterating through ``fetch_report_result_rows`` requests. 
+ + This class thinly wraps an initial + :class:`google.ads.admanager_v1.types.FetchReportResultRowsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``rows`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``FetchReportResultRows`` requests and continue to iterate + through the ``rows`` field on the + corresponding responses. + + All the usual :class:`google.ads.admanager_v1.types.FetchReportResultRowsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., report_service.FetchReportResultRowsResponse], + request: report_service.FetchReportResultRowsRequest, + response: report_service.FetchReportResultRowsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ads.admanager_v1.types.FetchReportResultRowsRequest): + The initial request object. + response (google.ads.admanager_v1.types.FetchReportResultRowsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = report_service.FetchReportResultRowsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[report_service.FetchReportResultRowsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[report_service.Report.DataTable.Row]: + for page in self.pages: + yield from page.rows + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/transports/base.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/transports/base.py index fe244f5d7b89..1fac274ed16e 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/transports/base.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/transports/base.py @@ -129,8 +129,33 @@ def host(self): def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { - self.export_saved_report: gapic_v1.method.wrap_method( - self.export_saved_report, + self.get_report: gapic_v1.method.wrap_method( + self.get_report, + default_timeout=None, + client_info=client_info, + ), + self.list_reports: gapic_v1.method.wrap_method( + self.list_reports, + default_timeout=None, + client_info=client_info, + ), + self.create_report: gapic_v1.method.wrap_method( + self.create_report, + default_timeout=None, + client_info=client_info, + ), + self.update_report: gapic_v1.method.wrap_method( + self.update_report, + default_timeout=None, + client_info=client_info, + ), + self.run_report: gapic_v1.method.wrap_method( + self.run_report, + default_timeout=None, + client_info=client_info, + ), + self.fetch_report_result_rows: gapic_v1.method.wrap_method( + self.fetch_report_result_rows, default_timeout=None, client_info=client_info, ), @@ -151,14 +176,65 @@ def operations_client(self): raise NotImplementedError() @property - def export_saved_report( + def get_report( self, ) -> Callable[ - [report_service.ExportSavedReportRequest], + [report_service.GetReportRequest], + Union[report_service.Report, Awaitable[report_service.Report]], + ]: + raise NotImplementedError() + + @property + def list_reports( + self, + ) -> Callable[ + [report_service.ListReportsRequest], + Union[ + report_service.ListReportsResponse, + Awaitable[report_service.ListReportsResponse], + ], + ]: + raise NotImplementedError() + + @property + def create_report( + self, + ) -> Callable[ + [report_service.CreateReportRequest], + Union[report_service.Report, Awaitable[report_service.Report]], + ]: + raise NotImplementedError() + + @property + def update_report( + self, + ) -> Callable[ + [report_service.UpdateReportRequest], + Union[report_service.Report, Awaitable[report_service.Report]], + ]: + raise NotImplementedError() + + @property + def run_report( + self, + ) -> Callable[ + [report_service.RunReportRequest], Union[operations_pb2.Operation, 
Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() + @property + def fetch_report_result_rows( + self, + ) -> Callable[ + [report_service.FetchReportResultRowsRequest], + Union[ + report_service.FetchReportResultRowsResponse, + Awaitable[report_service.FetchReportResultRowsResponse], + ], + ]: + raise NotImplementedError() + @property def get_operation( self, diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/transports/rest.py index 9b51c99ce9b2..c529442126ea 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/transports/rest.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/transports/rest.py @@ -71,11 +71,51 @@ class ReportServiceRestInterceptor: .. code-block:: python class MyCustomReportServiceInterceptor(ReportServiceRestInterceptor): - def pre_export_saved_report(self, request, metadata): + def pre_create_report(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_export_saved_report(self, response): + def post_create_report(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_fetch_report_result_rows(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_fetch_report_result_rows(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_report(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_report(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_reports(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_reports(self, response): + logging.log(f"Received 
response: {response}") + return response + + def pre_run_report(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_run_report(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_report(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_report(self, response): logging.log(f"Received response: {response}") return response @@ -85,22 +125,135 @@ def post_export_saved_report(self, response): """ - def pre_export_saved_report( + def pre_create_report( + self, + request: report_service.CreateReportRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[report_service.CreateReportRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_report + + Override in a subclass to manipulate the request or metadata + before they are sent to the ReportService server. + """ + return request, metadata + + def post_create_report( + self, response: report_service.Report + ) -> report_service.Report: + """Post-rpc interceptor for create_report + + Override in a subclass to manipulate the response + after it is returned by the ReportService server but before + it is returned to user code. + """ + return response + + def pre_fetch_report_result_rows( + self, + request: report_service.FetchReportResultRowsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[report_service.FetchReportResultRowsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for fetch_report_result_rows + + Override in a subclass to manipulate the request or metadata + before they are sent to the ReportService server. 
+ """ + return request, metadata + + def post_fetch_report_result_rows( + self, response: report_service.FetchReportResultRowsResponse + ) -> report_service.FetchReportResultRowsResponse: + """Post-rpc interceptor for fetch_report_result_rows + + Override in a subclass to manipulate the response + after it is returned by the ReportService server but before + it is returned to user code. + """ + return response + + def pre_get_report( + self, + request: report_service.GetReportRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[report_service.GetReportRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_report + + Override in a subclass to manipulate the request or metadata + before they are sent to the ReportService server. + """ + return request, metadata + + def post_get_report(self, response: report_service.Report) -> report_service.Report: + """Post-rpc interceptor for get_report + + Override in a subclass to manipulate the response + after it is returned by the ReportService server but before + it is returned to user code. + """ + return response + + def pre_list_reports( self, - request: report_service.ExportSavedReportRequest, + request: report_service.ListReportsRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[report_service.ExportSavedReportRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for export_saved_report + ) -> Tuple[report_service.ListReportsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_reports Override in a subclass to manipulate the request or metadata before they are sent to the ReportService server. """ return request, metadata - def post_export_saved_report( + def post_list_reports( + self, response: report_service.ListReportsResponse + ) -> report_service.ListReportsResponse: + """Post-rpc interceptor for list_reports + + Override in a subclass to manipulate the response + after it is returned by the ReportService server but before + it is returned to user code. 
+ """ + return response + + def pre_run_report( + self, + request: report_service.RunReportRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[report_service.RunReportRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for run_report + + Override in a subclass to manipulate the request or metadata + before they are sent to the ReportService server. + """ + return request, metadata + + def post_run_report( self, response: operations_pb2.Operation ) -> operations_pb2.Operation: - """Post-rpc interceptor for export_saved_report + """Post-rpc interceptor for run_report + + Override in a subclass to manipulate the response + after it is returned by the ReportService server but before + it is returned to user code. + """ + return response + + def pre_update_report( + self, + request: report_service.UpdateReportRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[report_service.UpdateReportRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_report + + Override in a subclass to manipulate the request or metadata + before they are sent to the ReportService server. + """ + return request, metadata + + def post_update_report( + self, response: report_service.Report + ) -> report_service.Report: + """Post-rpc interceptor for update_report Override in a subclass to manipulate the response after it is returned by the ReportService server but before @@ -142,7 +295,7 @@ class ReportServiceRestStub: class ReportServiceRestTransport(ReportServiceTransport): """REST backend transport for ReportService. - Provides methods for interacting with Reports. + Provides methods for interacting with reports. 
This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation @@ -242,11 +395,11 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "google.longrunning.Operations.GetOperation": [ { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/exports/*}", + "uri": "/v1/{name=networks/*/operations/reports/runs/*}", }, { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/runs/*}", + "uri": "/v1/{name=networks/*/operations/reports/exports/*}", }, ], } @@ -267,9 +420,352 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Return the client from cache. return self._operations_client - class _ExportSavedReport(ReportServiceRestStub): + class _CreateReport(ReportServiceRestStub): + def __hash__(self): + return hash("CreateReport") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: report_service.CreateReportRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> report_service.Report: + r"""Call the create report method over HTTP. + + Args: + request (~.report_service.CreateReportRequest): + The request object. Request object for ``CreateReport`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.report_service.Report: + The ``Report`` resource. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=networks/*}/reports", + "body": "report", + }, + ] + request, metadata = self._interceptor.pre_create_report(request, metadata) + pb_request = report_service.CreateReportRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = report_service.Report() + pb_resp = report_service.Report.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_report(resp) + return resp + + class _FetchReportResultRows(ReportServiceRestStub): + def __hash__(self): + return hash("FetchReportResultRows") + + def __call__( + self, + request: report_service.FetchReportResultRowsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> report_service.FetchReportResultRowsResponse: + r"""Call the fetch report result rows method over HTTP. + + Args: + request (~.report_service.FetchReportResultRowsRequest): + The request object. The request message for the fetch + report result rows endpoint. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.report_service.FetchReportResultRowsResponse: + The response message for the fetch + report result rows endpoint. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=networks/*/reports/*/results/*}:fetchRows", + }, + ] + request, metadata = self._interceptor.pre_fetch_report_result_rows( + request, metadata + ) + pb_request = report_service.FetchReportResultRowsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = report_service.FetchReportResultRowsResponse() + pb_resp = report_service.FetchReportResultRowsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_fetch_report_result_rows(resp) + return resp + + class _GetReport(ReportServiceRestStub): + def __hash__(self): + return hash("GetReport") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: report_service.GetReportRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> report_service.Report: + r"""Call the get report method over HTTP. + + Args: + request (~.report_service.GetReportRequest): + The request object. Request object for ``GetReport`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.report_service.Report: + The ``Report`` resource. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=networks/*/reports/*}", + }, + ] + request, metadata = self._interceptor.pre_get_report(request, metadata) + pb_request = report_service.GetReportRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = report_service.Report() + pb_resp = report_service.Report.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_report(resp) + return resp + + class _ListReports(ReportServiceRestStub): + def __hash__(self): + return hash("ListReports") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: report_service.ListReportsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> report_service.ListReportsResponse: + r"""Call the list reports method over HTTP. + + Args: + request (~.report_service.ListReportsRequest): + The request object. Request object for ``ListReports`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.report_service.ListReportsResponse: + Response object for ``ListReportsResponse`` containing + matching ``Report`` objects. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=networks/*}/reports", + }, + ] + request, metadata = self._interceptor.pre_list_reports(request, metadata) + pb_request = report_service.ListReportsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = report_service.ListReportsResponse() + pb_resp = report_service.ListReportsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_reports(resp) + return resp + + class _RunReport(ReportServiceRestStub): def __hash__(self): - return hash("ExportSavedReport") + return hash("RunReport") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -283,18 +779,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: report_service.ExportSavedReportRequest, + request: report_service.RunReportRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the export saved report method over HTTP. + r"""Call the run report method over HTTP. Args: - request (~.report_service.ExportSavedReportRequest): - The request object. Request proto for the configuration - of a report run. + request (~.report_service.RunReportRequest): + The request object. Request message for a running a + report. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -312,14 +808,12 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "post", - "uri": "/v1/{report=networks/*/reports/*}:exportSavedReport", + "uri": "/v1/{name=networks/*/reports/*}:run", "body": "*", }, ] - request, metadata = self._interceptor.pre_export_saved_report( - request, metadata - ) - pb_request = report_service.ExportSavedReportRequest.pb(request) + request, metadata = self._interceptor.pre_run_report(request, metadata) + pb_request = report_service.RunReportRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body @@ -360,16 +854,155 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_export_saved_report(resp) + resp = self._interceptor.post_run_report(resp) + return resp + + class _UpdateReport(ReportServiceRestStub): + def __hash__(self): + return hash("UpdateReport") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: report_service.UpdateReportRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> report_service.Report: + r"""Call the update report method over HTTP. + + Args: + request (~.report_service.UpdateReportRequest): + The request object. Request object for ``UpdateReport`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.report_service.Report: + The ``Report`` resource. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{report.name=networks/*/reports/*}", + "body": "report", + }, + ] + request, metadata = self._interceptor.pre_update_report(request, metadata) + pb_request = report_service.UpdateReportRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = report_service.Report() + pb_resp = report_service.Report.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_report(resp) return resp @property - def export_saved_report( + def create_report( + self, + ) -> Callable[[report_service.CreateReportRequest], report_service.Report]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateReport(self._session, self._host, self._interceptor) # type: ignore + + @property + def fetch_report_result_rows( + self, + ) -> Callable[ + [report_service.FetchReportResultRowsRequest], + report_service.FetchReportResultRowsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._FetchReportResultRows(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_report( + self, + ) -> Callable[[report_service.GetReportRequest], report_service.Report]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetReport(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_reports( + self, + ) -> Callable[ + [report_service.ListReportsRequest], report_service.ListReportsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListReports(self._session, self._host, self._interceptor) # type: ignore + + @property + def run_report( self, - ) -> Callable[[report_service.ExportSavedReportRequest], operations_pb2.Operation]: + ) -> Callable[[report_service.RunReportRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._ExportSavedReport(self._session, self._host, self._interceptor) # type: ignore + return self._RunReport(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_report( + self, + ) -> Callable[[report_service.UpdateReportRequest], report_service.Report]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateReport(self._session, self._host, self._interceptor) # type: ignore @property def get_operation(self): @@ -402,11 +1035,11 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/exports/*}", + "uri": "/v1/{name=networks/*/operations/reports/runs/*}", }, { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/runs/*}", + "uri": "/v1/{name=networks/*/operations/reports/exports/*}", }, ] diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/client.py index bae3794b29dd..2b7295b48ef8 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/client.py @@ -51,7 +51,7 @@ from google.longrunning import operations_pb2 # type: ignore from google.ads.admanager_v1.services.role_service import pagers -from google.ads.admanager_v1.types import role_service +from google.ads.admanager_v1.types import role_enums, role_messages, role_service from .transports.base import DEFAULT_CLIENT_INFO, RoleServiceTransport from .transports.rest import RoleServiceRestTransport @@ -91,7 +91,7 @@ def get_transport_class( class RoleServiceClient(metaclass=RoleServiceClientMeta): - """Provides methods for handling Role objects.""" + """Provides methods for handling ``Role`` 
objects.""" @staticmethod def _get_default_mtls_endpoint(api_endpoint): @@ -688,8 +688,8 @@ def get_role( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> role_service.Role: - r"""API to retrieve a Role object. + ) -> role_messages.Role: + r"""API to retrieve a ``Role`` object. .. code-block:: python @@ -719,7 +719,7 @@ def sample_get_role(): Args: request (Union[google.ads.admanager_v1.types.GetRoleRequest, dict]): - The request object. Request object for GetRole method. + The request object. Request object for ``GetRole`` method. name (str): Required. The resource name of the Role. Format: ``networks/{network_code}/roles/{role_id}`` @@ -789,7 +789,7 @@ def list_roles( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListRolesPager: - r"""API to retrieve a list of Role objects. + r"""API to retrieve a list of ``Role`` objects. .. code-block:: python @@ -820,7 +820,7 @@ def sample_list_roles(): Args: request (Union[google.ads.admanager_v1.types.ListRolesRequest, dict]): - The request object. Request object for ListRoles method. + The request object. Request object for ``ListRoles`` method. parent (str): Required. The parent, which owns this collection of Roles. Format: ``networks/{network_code}`` @@ -836,12 +836,11 @@ def sample_list_roles(): Returns: google.ads.admanager_v1.services.role_service.pagers.ListRolesPager: - Response object for ListRolesRequest - containing matching Role resources. + Response object for ListRolesRequest containing matching + Role objects. - Iterating over this object will yield - results and resolve additional pages - automatically. + Iterating over this object will yield results and + resolve additional pages automatically. """ # Create or coerce a protobuf request object. 
diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/pagers.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/pagers.py index 93a125b6f0cc..be9ac9755dd2 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/pagers.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/pagers.py @@ -38,7 +38,7 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.ads.admanager_v1.types import role_service +from google.ads.admanager_v1.types import role_messages, role_service class ListRolesPager: @@ -107,7 +107,7 @@ def pages(self) -> Iterator[role_service.ListRolesResponse]: ) yield self._response - def __iter__(self) -> Iterator[role_service.Role]: + def __iter__(self) -> Iterator[role_messages.Role]: for page in self.pages: yield from page.roles diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/transports/base.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/transports/base.py index c90135acb761..527ea039fdf5 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/transports/base.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/transports/base.py @@ -26,7 +26,7 @@ from google.oauth2 import service_account # type: ignore from google.ads.admanager_v1 import gapic_version as package_version -from google.ads.admanager_v1.types import role_service +from google.ads.admanager_v1.types import role_messages, role_service DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -155,7 +155,7 @@ def get_role( self, ) -> Callable[ [role_service.GetRoleRequest], - Union[role_service.Role, Awaitable[role_service.Role]], + Union[role_messages.Role, 
Awaitable[role_messages.Role]], ]: raise NotImplementedError() diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/transports/rest.py index c34a643c70d7..c729f35e3b0b 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/transports/rest.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/transports/rest.py @@ -38,7 +38,7 @@ from google.longrunning import operations_pb2 # type: ignore -from google.ads.admanager_v1.types import role_service +from google.ads.admanager_v1.types import role_messages, role_service from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO from .base import RoleServiceTransport @@ -97,7 +97,7 @@ def pre_get_role( """ return request, metadata - def post_get_role(self, response: role_service.Role) -> role_service.Role: + def post_get_role(self, response: role_messages.Role) -> role_messages.Role: """Post-rpc interceptor for get_role Override in a subclass to manipulate the response @@ -163,7 +163,7 @@ class RoleServiceRestStub: class RoleServiceRestTransport(RoleServiceTransport): """REST backend transport for RoleService. - Provides methods for handling Role objects. + Provides methods for handling ``Role`` objects. This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation @@ -270,12 +270,12 @@ def __call__( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> role_service.Role: + ) -> role_messages.Role: r"""Call the get role method over HTTP. Args: request (~.role_service.GetRoleRequest): - The request object. Request object for GetRole method. + The request object. Request object for ``GetRole`` method. 
retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -283,8 +283,8 @@ def __call__( sent along with the request as metadata. Returns: - ~.role_service.Role: - The Role resource. + ~.role_messages.Role: + The ``Role`` resource. """ http_options: List[Dict[str, str]] = [ @@ -327,8 +327,8 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = role_service.Role() - pb_resp = role_service.Role.pb(resp) + resp = role_messages.Role() + pb_resp = role_messages.Role.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_role(resp) @@ -360,7 +360,7 @@ def __call__( Args: request (~.role_service.ListRolesRequest): - The request object. Request object for ListRoles method. + The request object. Request object for ``ListRoles`` method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -369,8 +369,8 @@ def __call__( Returns: ~.role_service.ListRolesResponse: - Response object for ListRolesRequest - containing matching Role resources. + Response object for ``ListRolesRequest`` containing + matching ``Role`` objects. """ @@ -422,7 +422,7 @@ def __call__( return resp @property - def get_role(self) -> Callable[[role_service.GetRoleRequest], role_service.Role]: + def get_role(self) -> Callable[[role_service.GetRoleRequest], role_messages.Role]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast return self._GetRole(self._session, self._host, self._interceptor) # type: ignore @@ -466,11 +466,11 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/exports/*}", + "uri": "/v1/{name=networks/*/operations/reports/runs/*}", }, { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/runs/*}", + "uri": "/v1/{name=networks/*/operations/reports/exports/*}", }, ] diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_partner_service/__init__.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/__init__.py similarity index 86% rename from packages/google-ads-admanager/google/ads/admanager_v1/services/ad_partner_service/__init__.py rename to packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/__init__.py index e22dd3b66e24..8d84490b8655 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_partner_service/__init__.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/__init__.py @@ -13,6 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from .client import AdPartnerServiceClient +from .client import TaxonomyCategoryServiceClient -__all__ = ("AdPartnerServiceClient",) +__all__ = ("TaxonomyCategoryServiceClient",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_partner_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/client.py similarity index 84% rename from packages/google-ads-admanager/google/ads/admanager_v1/services/ad_partner_service/client.py rename to packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/client.py index f3907ddcd88e..d40f3a38712a 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_partner_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/client.py @@ -50,15 +50,19 @@ from google.longrunning import operations_pb2 # type: ignore -from google.ads.admanager_v1.services.ad_partner_service import pagers -from google.ads.admanager_v1.types import ad_partner_service +from google.ads.admanager_v1.services.taxonomy_category_service import pagers +from google.ads.admanager_v1.types import ( + taxonomy_category_messages, + taxonomy_category_service, + taxonomy_type_enum, +) -from .transports.base import DEFAULT_CLIENT_INFO, AdPartnerServiceTransport -from .transports.rest import AdPartnerServiceRestTransport +from .transports.base import DEFAULT_CLIENT_INFO, TaxonomyCategoryServiceTransport +from .transports.rest import TaxonomyCategoryServiceRestTransport -class AdPartnerServiceClientMeta(type): - """Metaclass for the AdPartnerService client. +class TaxonomyCategoryServiceClientMeta(type): + """Metaclass for the TaxonomyCategoryService client. This provides class-level methods for building and retrieving support objects (e.g. 
transport) without polluting the client instance @@ -67,13 +71,13 @@ class AdPartnerServiceClientMeta(type): _transport_registry = ( OrderedDict() - ) # type: Dict[str, Type[AdPartnerServiceTransport]] - _transport_registry["rest"] = AdPartnerServiceRestTransport + ) # type: Dict[str, Type[TaxonomyCategoryServiceTransport]] + _transport_registry["rest"] = TaxonomyCategoryServiceRestTransport def get_transport_class( cls, label: Optional[str] = None, - ) -> Type[AdPartnerServiceTransport]: + ) -> Type[TaxonomyCategoryServiceTransport]: """Returns an appropriate transport class. Args: @@ -92,8 +96,8 @@ def get_transport_class( return next(iter(cls._transport_registry.values())) -class AdPartnerServiceClient(metaclass=AdPartnerServiceClientMeta): - """Provides methods for handling AdPartner objects.""" +class TaxonomyCategoryServiceClient(metaclass=TaxonomyCategoryServiceClientMeta): + """Provides methods for handling ``TaxonomyCategory`` objects.""" @staticmethod def _get_default_mtls_endpoint(api_endpoint): @@ -145,7 +149,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - AdPartnerServiceClient: The constructed client. + TaxonomyCategoryServiceClient: The constructed client. """ credentials = service_account.Credentials.from_service_account_info(info) kwargs["credentials"] = credentials @@ -163,7 +167,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - AdPartnerServiceClient: The constructed client. + TaxonomyCategoryServiceClient: The constructed client. 
""" credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials @@ -172,47 +176,48 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file @property - def transport(self) -> AdPartnerServiceTransport: + def transport(self) -> TaxonomyCategoryServiceTransport: """Returns the transport used by the client instance. Returns: - AdPartnerServiceTransport: The transport used by the client + TaxonomyCategoryServiceTransport: The transport used by the client instance. """ return self._transport @staticmethod - def ad_partner_path( + def network_path( network_code: str, - ad_partner: str, ) -> str: - """Returns a fully-qualified ad_partner string.""" - return "networks/{network_code}/adPartners/{ad_partner}".format( + """Returns a fully-qualified network string.""" + return "networks/{network_code}".format( network_code=network_code, - ad_partner=ad_partner, ) @staticmethod - def parse_ad_partner_path(path: str) -> Dict[str, str]: - """Parses a ad_partner path into its component segments.""" - m = re.match( - r"^networks/(?P.+?)/adPartners/(?P.+?)$", path - ) + def parse_network_path(path: str) -> Dict[str, str]: + """Parses a network path into its component segments.""" + m = re.match(r"^networks/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def network_path( + def taxonomy_category_path( network_code: str, + taxonomy_category: str, ) -> str: - """Returns a fully-qualified network string.""" - return "networks/{network_code}".format( + """Returns a fully-qualified taxonomy_category string.""" + return "networks/{network_code}/taxonomyCategories/{taxonomy_category}".format( network_code=network_code, + taxonomy_category=taxonomy_category, ) @staticmethod - def parse_network_path(path: str) -> Dict[str, str]: - """Parses a network path into its component segments.""" - m = re.match(r"^networks/(?P.+?)$", path) + def 
parse_taxonomy_category_path(path: str) -> Dict[str, str]: + """Parses a taxonomy_category path into its component segments.""" + m = re.match( + r"^networks/(?P.+?)/taxonomyCategories/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod @@ -434,15 +439,17 @@ def _get_api_endpoint( elif use_mtls_endpoint == "always" or ( use_mtls_endpoint == "auto" and client_cert_source ): - _default_universe = AdPartnerServiceClient._DEFAULT_UNIVERSE + _default_universe = TaxonomyCategoryServiceClient._DEFAULT_UNIVERSE if universe_domain != _default_universe: raise MutualTLSChannelError( f"mTLS is not supported in any universe other than {_default_universe}." ) - api_endpoint = AdPartnerServiceClient.DEFAULT_MTLS_ENDPOINT + api_endpoint = TaxonomyCategoryServiceClient.DEFAULT_MTLS_ENDPOINT else: - api_endpoint = AdPartnerServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=universe_domain + api_endpoint = ( + TaxonomyCategoryServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) ) return api_endpoint @@ -462,7 +469,7 @@ def _get_universe_domain( Raises: ValueError: If the universe domain is an empty string. """ - universe_domain = AdPartnerServiceClient._DEFAULT_UNIVERSE + universe_domain = TaxonomyCategoryServiceClient._DEFAULT_UNIVERSE if client_universe_domain is not None: universe_domain = client_universe_domain elif universe_domain_env is not None: @@ -488,7 +495,7 @@ def _compare_universes( ValueError: when client_universe does not match the universe in credentials. 
""" - default_universe = AdPartnerServiceClient._DEFAULT_UNIVERSE + default_universe = TaxonomyCategoryServiceClient._DEFAULT_UNIVERSE credentials_universe = getattr(credentials, "universe_domain", default_universe) if client_universe != credentials_universe: @@ -512,7 +519,7 @@ def _validate_universe_domain(self): """ self._is_universe_domain_valid = ( self._is_universe_domain_valid - or AdPartnerServiceClient._compare_universes( + or TaxonomyCategoryServiceClient._compare_universes( self.universe_domain, self.transport._credentials ) ) @@ -542,13 +549,15 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, transport: Optional[ Union[ - str, AdPartnerServiceTransport, Callable[..., AdPartnerServiceTransport] + str, + TaxonomyCategoryServiceTransport, + Callable[..., TaxonomyCategoryServiceTransport], ] ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiates the ad partner service client. + """Instantiates the taxonomy category service client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -556,10 +565,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Optional[Union[str,AdPartnerServiceTransport,Callable[..., AdPartnerServiceTransport]]]): + transport (Optional[Union[str,TaxonomyCategoryServiceTransport,Callable[..., TaxonomyCategoryServiceTransport]]]): The transport to use, or a Callable that constructs and returns a new transport. If a Callable is given, it will be called with the same set of initialization - arguments as used in the AdPartnerServiceTransport constructor. + arguments as used in the TaxonomyCategoryServiceTransport constructor. If set to None, a transport is chosen automatically. 
client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -612,11 +621,13 @@ def __init__( self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env, - ) = AdPartnerServiceClient._read_environment_variables() - self._client_cert_source = AdPartnerServiceClient._get_client_cert_source( - self._client_options.client_cert_source, self._use_client_cert + ) = TaxonomyCategoryServiceClient._read_environment_variables() + self._client_cert_source = ( + TaxonomyCategoryServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) ) - self._universe_domain = AdPartnerServiceClient._get_universe_domain( + self._universe_domain = TaxonomyCategoryServiceClient._get_universe_domain( universe_domain_opt, self._universe_domain_env ) self._api_endpoint = None # updated below, depending on `transport` @@ -633,9 +644,9 @@ def __init__( # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, AdPartnerServiceTransport) + transport_provided = isinstance(transport, TaxonomyCategoryServiceTransport) if transport_provided: - # transport is a AdPartnerServiceTransport instance. + # transport is a TaxonomyCategoryServiceTransport instance. if credentials or self._client_options.credentials_file or api_key_value: raise ValueError( "When providing a transport instance, " @@ -646,12 +657,12 @@ def __init__( "When providing a transport instance, provide its scopes " "directly." 
) - self._transport = cast(AdPartnerServiceTransport, transport) + self._transport = cast(TaxonomyCategoryServiceTransport, transport) self._api_endpoint = self._transport.host self._api_endpoint = ( self._api_endpoint - or AdPartnerServiceClient._get_api_endpoint( + or TaxonomyCategoryServiceClient._get_api_endpoint( self._client_options.api_endpoint, self._client_cert_source, self._universe_domain, @@ -670,12 +681,12 @@ def __init__( ) transport_init: Union[ - Type[AdPartnerServiceTransport], - Callable[..., AdPartnerServiceTransport], + Type[TaxonomyCategoryServiceTransport], + Callable[..., TaxonomyCategoryServiceTransport], ] = ( - AdPartnerServiceClient.get_transport_class(transport) + TaxonomyCategoryServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None - else cast(Callable[..., AdPartnerServiceTransport], transport) + else cast(Callable[..., TaxonomyCategoryServiceTransport], transport) ) # initialize with the provided callable or the passed in class self._transport = transport_init( @@ -690,16 +701,18 @@ def __init__( api_audience=self._client_options.api_audience, ) - def get_ad_partner( + def get_taxonomy_category( self, - request: Optional[Union[ad_partner_service.GetAdPartnerRequest, dict]] = None, + request: Optional[ + Union[taxonomy_category_service.GetTaxonomyCategoryRequest, dict] + ] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> ad_partner_service.AdPartner: - r"""API to retrieve a AdPartner object. + ) -> taxonomy_category_messages.TaxonomyCategory: + r"""API to retrieve a ``TaxonomyCategory`` object. .. 
code-block:: python @@ -712,28 +725,28 @@ def get_ad_partner( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.ads import admanager_v1 - def sample_get_ad_partner(): + def sample_get_taxonomy_category(): # Create a client - client = admanager_v1.AdPartnerServiceClient() + client = admanager_v1.TaxonomyCategoryServiceClient() # Initialize request argument(s) - request = admanager_v1.GetAdPartnerRequest( + request = admanager_v1.GetTaxonomyCategoryRequest( name="name_value", ) # Make the request - response = client.get_ad_partner(request=request) + response = client.get_taxonomy_category(request=request) # Handle the response print(response) Args: - request (Union[google.ads.admanager_v1.types.GetAdPartnerRequest, dict]): - The request object. Request object for GetAdPartner - method. + request (Union[google.ads.admanager_v1.types.GetTaxonomyCategoryRequest, dict]): + The request object. Request object for ``GetTaxonomyCategory`` method. name (str): - Required. The resource name of the AdPartner. Format: - ``networks/{network_code}/adPartners/{ad_partner_id}`` + Required. The resource name of the TaxonomyCategory. + Format: + ``networks/{network_code}/taxonomyCategories/{taxonomy_category_id}`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -745,8 +758,8 @@ def sample_get_ad_partner(): sent along with the request as metadata. Returns: - google.ads.admanager_v1.types.AdPartner: - The AdPartner resource. + google.ads.admanager_v1.types.TaxonomyCategory: + The TaxonomyCategory resource. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -760,8 +773,10 @@ def sample_get_ad_partner(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
- if not isinstance(request, ad_partner_service.GetAdPartnerRequest): - request = ad_partner_service.GetAdPartnerRequest(request) + if not isinstance( + request, taxonomy_category_service.GetTaxonomyCategoryRequest + ): + request = taxonomy_category_service.GetTaxonomyCategoryRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if name is not None: @@ -769,7 +784,7 @@ def sample_get_ad_partner(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_ad_partner] + rpc = self._transport._wrapped_methods[self._transport.get_taxonomy_category] # Certain fields should be provided within the metadata header; # add these here. @@ -791,16 +806,18 @@ def sample_get_ad_partner(): # Done; return the response. return response - def list_ad_partners( + def list_taxonomy_categories( self, - request: Optional[Union[ad_partner_service.ListAdPartnersRequest, dict]] = None, + request: Optional[ + Union[taxonomy_category_service.ListTaxonomyCategoriesRequest, dict] + ] = None, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListAdPartnersPager: - r"""API to retrieve a list of AdPartner objects. + ) -> pagers.ListTaxonomyCategoriesPager: + r"""API to retrieve a list of ``TaxonomyCategory`` objects. .. 
code-block:: python @@ -813,29 +830,28 @@ def list_ad_partners( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.ads import admanager_v1 - def sample_list_ad_partners(): + def sample_list_taxonomy_categories(): # Create a client - client = admanager_v1.AdPartnerServiceClient() + client = admanager_v1.TaxonomyCategoryServiceClient() # Initialize request argument(s) - request = admanager_v1.ListAdPartnersRequest( + request = admanager_v1.ListTaxonomyCategoriesRequest( parent="parent_value", ) # Make the request - page_result = client.list_ad_partners(request=request) + page_result = client.list_taxonomy_categories(request=request) # Handle the response for response in page_result: print(response) Args: - request (Union[google.ads.admanager_v1.types.ListAdPartnersRequest, dict]): - The request object. Request object for ListAdPartners - method. + request (Union[google.ads.admanager_v1.types.ListTaxonomyCategoriesRequest, dict]): + The request object. Request object for ``ListTaxonomyCategories`` method. parent (str): Required. The parent, which owns this collection of - AdPartners. Format: ``networks/{network_code}`` + TaxonomyCategories. Format: ``networks/{network_code}`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -847,14 +863,12 @@ def sample_list_ad_partners(): sent along with the request as metadata. Returns: - google.ads.admanager_v1.services.ad_partner_service.pagers.ListAdPartnersPager: - Response object for - ListAdPartnersRequest containing - matching AdPartner resources. + google.ads.admanager_v1.services.taxonomy_category_service.pagers.ListTaxonomyCategoriesPager: + Response object for ListTaxonomyCategoriesRequest containing matching + TaxonomyCategory objects. - Iterating over this object will yield - results and resolve additional pages - automatically. + Iterating over this object will yield results and + resolve additional pages automatically. 
""" # Create or coerce a protobuf request object. @@ -869,8 +883,10 @@ def sample_list_ad_partners(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, ad_partner_service.ListAdPartnersRequest): - request = ad_partner_service.ListAdPartnersRequest(request) + if not isinstance( + request, taxonomy_category_service.ListTaxonomyCategoriesRequest + ): + request = taxonomy_category_service.ListTaxonomyCategoriesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: @@ -878,7 +894,7 @@ def sample_list_ad_partners(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_ad_partners] + rpc = self._transport._wrapped_methods[self._transport.list_taxonomy_categories] # Certain fields should be provided within the metadata header; # add these here. @@ -899,7 +915,7 @@ def sample_list_ad_partners(): # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. - response = pagers.ListAdPartnersPager( + response = pagers.ListTaxonomyCategoriesPager( method=rpc, request=request, response=response, @@ -911,7 +927,7 @@ def sample_list_ad_partners(): # Done; return the response. 
return response - def __enter__(self) -> "AdPartnerServiceClient": + def __enter__(self) -> "TaxonomyCategoryServiceClient": return self def __exit__(self, type, value, traceback): @@ -987,4 +1003,4 @@ def get_operation( ) -__all__ = ("AdPartnerServiceClient",) +__all__ = ("TaxonomyCategoryServiceClient",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_partner_service/pagers.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/pagers.py similarity index 71% rename from packages/google-ads-admanager/google/ads/admanager_v1/services/ad_partner_service/pagers.py rename to packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/pagers.py index 1c99245728a1..ba0e74b14baf 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_partner_service/pagers.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/pagers.py @@ -38,32 +38,35 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.ads.admanager_v1.types import ad_partner_service +from google.ads.admanager_v1.types import ( + taxonomy_category_messages, + taxonomy_category_service, +) -class ListAdPartnersPager: - """A pager for iterating through ``list_ad_partners`` requests. +class ListTaxonomyCategoriesPager: + """A pager for iterating through ``list_taxonomy_categories`` requests. This class thinly wraps an initial - :class:`google.ads.admanager_v1.types.ListAdPartnersResponse` object, and + :class:`google.ads.admanager_v1.types.ListTaxonomyCategoriesResponse` object, and provides an ``__iter__`` method to iterate through its - ``ad_partners`` field. + ``taxonomy_categories`` field. 
If there are more pages, the ``__iter__`` method will make additional - ``ListAdPartners`` requests and continue to iterate - through the ``ad_partners`` field on the + ``ListTaxonomyCategories`` requests and continue to iterate + through the ``taxonomy_categories`` field on the corresponding responses. - All the usual :class:`google.ads.admanager_v1.types.ListAdPartnersResponse` + All the usual :class:`google.ads.admanager_v1.types.ListTaxonomyCategoriesResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ def __init__( self, - method: Callable[..., ad_partner_service.ListAdPartnersResponse], - request: ad_partner_service.ListAdPartnersRequest, - response: ad_partner_service.ListAdPartnersResponse, + method: Callable[..., taxonomy_category_service.ListTaxonomyCategoriesResponse], + request: taxonomy_category_service.ListTaxonomyCategoriesRequest, + response: taxonomy_category_service.ListTaxonomyCategoriesResponse, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, @@ -74,9 +77,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (google.ads.admanager_v1.types.ListAdPartnersRequest): + request (google.ads.admanager_v1.types.ListTaxonomyCategoriesRequest): The initial request object. - response (google.ads.admanager_v1.types.ListAdPartnersResponse): + response (google.ads.admanager_v1.types.ListTaxonomyCategoriesResponse): The initial response object. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -85,7 +88,7 @@ def __init__( sent along with the request as metadata. 
""" self._method = method - self._request = ad_partner_service.ListAdPartnersRequest(request) + self._request = taxonomy_category_service.ListTaxonomyCategoriesRequest(request) self._response = response self._retry = retry self._timeout = timeout @@ -95,7 +98,9 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterator[ad_partner_service.ListAdPartnersResponse]: + def pages( + self, + ) -> Iterator[taxonomy_category_service.ListTaxonomyCategoriesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token @@ -107,9 +112,9 @@ def pages(self) -> Iterator[ad_partner_service.ListAdPartnersResponse]: ) yield self._response - def __iter__(self) -> Iterator[ad_partner_service.AdPartner]: + def __iter__(self) -> Iterator[taxonomy_category_messages.TaxonomyCategory]: for page in self.pages: - yield from page.ad_partners + yield from page.taxonomy_categories def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/transports/__init__.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/transports/__init__.py similarity index 61% rename from packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/transports/__init__.py rename to packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/transports/__init__.py index f787889ae7b3..7879c3836853 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/transports/__init__.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/transports/__init__.py @@ -16,15 +16,20 @@ from collections import OrderedDict from typing import Dict, Type -from .base import CreativeServiceTransport -from .rest import 
CreativeServiceRestInterceptor, CreativeServiceRestTransport +from .base import TaxonomyCategoryServiceTransport +from .rest import ( + TaxonomyCategoryServiceRestInterceptor, + TaxonomyCategoryServiceRestTransport, +) # Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[CreativeServiceTransport]] -_transport_registry["rest"] = CreativeServiceRestTransport +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[TaxonomyCategoryServiceTransport]] +_transport_registry["rest"] = TaxonomyCategoryServiceRestTransport __all__ = ( - "CreativeServiceTransport", - "CreativeServiceRestTransport", - "CreativeServiceRestInterceptor", + "TaxonomyCategoryServiceTransport", + "TaxonomyCategoryServiceRestTransport", + "TaxonomyCategoryServiceRestInterceptor", ) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/transports/base.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/transports/base.py similarity index 86% rename from packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/transports/base.py rename to packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/transports/base.py index 4085651854f9..5d03acb614f2 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/transports/base.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/transports/base.py @@ -26,15 +26,18 @@ from google.oauth2 import service_account # type: ignore from google.ads.admanager_v1 import gapic_version as package_version -from google.ads.admanager_v1.types import creative_service +from google.ads.admanager_v1.types import ( + taxonomy_category_messages, + taxonomy_category_service, +) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ ) -class CreativeServiceTransport(abc.ABC): - 
"""Abstract transport class for CreativeService.""" +class TaxonomyCategoryServiceTransport(abc.ABC): + """Abstract transport class for TaxonomyCategoryService.""" AUTH_SCOPES = () @@ -129,13 +132,13 @@ def host(self): def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { - self.get_creative: gapic_v1.method.wrap_method( - self.get_creative, + self.get_taxonomy_category: gapic_v1.method.wrap_method( + self.get_taxonomy_category, default_timeout=None, client_info=client_info, ), - self.list_creatives: gapic_v1.method.wrap_method( - self.list_creatives, + self.list_taxonomy_categories: gapic_v1.method.wrap_method( + self.list_taxonomy_categories, default_timeout=None, client_info=client_info, ), @@ -151,22 +154,25 @@ def close(self): raise NotImplementedError() @property - def get_creative( + def get_taxonomy_category( self, ) -> Callable[ - [creative_service.GetCreativeRequest], - Union[creative_service.Creative, Awaitable[creative_service.Creative]], + [taxonomy_category_service.GetTaxonomyCategoryRequest], + Union[ + taxonomy_category_messages.TaxonomyCategory, + Awaitable[taxonomy_category_messages.TaxonomyCategory], + ], ]: raise NotImplementedError() @property - def list_creatives( + def list_taxonomy_categories( self, ) -> Callable[ - [creative_service.ListCreativesRequest], + [taxonomy_category_service.ListTaxonomyCategoriesRequest], Union[ - creative_service.ListCreativesResponse, - Awaitable[creative_service.ListCreativesResponse], + taxonomy_category_service.ListTaxonomyCategoriesResponse, + Awaitable[taxonomy_category_service.ListTaxonomyCategoriesResponse], ], ]: raise NotImplementedError() @@ -185,4 +191,4 @@ def kind(self) -> str: raise NotImplementedError() -__all__ = ("CreativeServiceTransport",) +__all__ = ("TaxonomyCategoryServiceTransport",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_partner_service/transports/rest.py 
b/packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/transports/rest.py similarity index 73% rename from packages/google-ads-admanager/google/ads/admanager_v1/services/ad_partner_service/transports/rest.py rename to packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/transports/rest.py index 2c5f921453a5..02cfba6ecb30 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_partner_service/transports/rest.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/transports/rest.py @@ -38,10 +38,13 @@ from google.longrunning import operations_pb2 # type: ignore -from google.ads.admanager_v1.types import ad_partner_service +from google.ads.admanager_v1.types import ( + taxonomy_category_messages, + taxonomy_category_service, +) -from .base import AdPartnerServiceTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import TaxonomyCategoryServiceTransport DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, @@ -50,8 +53,8 @@ ) -class AdPartnerServiceRestInterceptor: - """Interceptor for AdPartnerService. +class TaxonomyCategoryServiceRestInterceptor: + """Interceptor for TaxonomyCategoryService. Interceptors are used to manipulate requests, request metadata, and responses in arbitrary ways. @@ -61,74 +64,79 @@ class AdPartnerServiceRestInterceptor: * Stripping extraneous information from responses These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the AdPartnerServiceRestTransport. + instance of a custom subclass when constructing the TaxonomyCategoryServiceRestTransport. .. 
code-block:: python - class MyCustomAdPartnerServiceInterceptor(AdPartnerServiceRestInterceptor): - def pre_get_ad_partner(self, request, metadata): + class MyCustomTaxonomyCategoryServiceInterceptor(TaxonomyCategoryServiceRestInterceptor): + def pre_get_taxonomy_category(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_get_ad_partner(self, response): + def post_get_taxonomy_category(self, response): logging.log(f"Received response: {response}") return response - def pre_list_ad_partners(self, request, metadata): + def pre_list_taxonomy_categories(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_list_ad_partners(self, response): + def post_list_taxonomy_categories(self, response): logging.log(f"Received response: {response}") return response - transport = AdPartnerServiceRestTransport(interceptor=MyCustomAdPartnerServiceInterceptor()) - client = AdPartnerServiceClient(transport=transport) + transport = TaxonomyCategoryServiceRestTransport(interceptor=MyCustomTaxonomyCategoryServiceInterceptor()) + client = TaxonomyCategoryServiceClient(transport=transport) """ - def pre_get_ad_partner( + def pre_get_taxonomy_category( self, - request: ad_partner_service.GetAdPartnerRequest, + request: taxonomy_category_service.GetTaxonomyCategoryRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[ad_partner_service.GetAdPartnerRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_ad_partner + ) -> Tuple[ + taxonomy_category_service.GetTaxonomyCategoryRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_taxonomy_category Override in a subclass to manipulate the request or metadata - before they are sent to the AdPartnerService server. + before they are sent to the TaxonomyCategoryService server. 
""" return request, metadata - def post_get_ad_partner( - self, response: ad_partner_service.AdPartner - ) -> ad_partner_service.AdPartner: - """Post-rpc interceptor for get_ad_partner + def post_get_taxonomy_category( + self, response: taxonomy_category_messages.TaxonomyCategory + ) -> taxonomy_category_messages.TaxonomyCategory: + """Post-rpc interceptor for get_taxonomy_category Override in a subclass to manipulate the response - after it is returned by the AdPartnerService server but before + after it is returned by the TaxonomyCategoryService server but before it is returned to user code. """ return response - def pre_list_ad_partners( + def pre_list_taxonomy_categories( self, - request: ad_partner_service.ListAdPartnersRequest, + request: taxonomy_category_service.ListTaxonomyCategoriesRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[ad_partner_service.ListAdPartnersRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_ad_partners + ) -> Tuple[ + taxonomy_category_service.ListTaxonomyCategoriesRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for list_taxonomy_categories Override in a subclass to manipulate the request or metadata - before they are sent to the AdPartnerService server. + before they are sent to the TaxonomyCategoryService server. """ return request, metadata - def post_list_ad_partners( - self, response: ad_partner_service.ListAdPartnersResponse - ) -> ad_partner_service.ListAdPartnersResponse: - """Post-rpc interceptor for list_ad_partners + def post_list_taxonomy_categories( + self, response: taxonomy_category_service.ListTaxonomyCategoriesResponse + ) -> taxonomy_category_service.ListTaxonomyCategoriesResponse: + """Post-rpc interceptor for list_taxonomy_categories Override in a subclass to manipulate the response - after it is returned by the AdPartnerService server but before + after it is returned by the TaxonomyCategoryService server but before it is returned to user code. 
""" return response @@ -141,7 +149,7 @@ def pre_get_operation( """Pre-rpc interceptor for get_operation Override in a subclass to manipulate the request or metadata - before they are sent to the AdPartnerService server. + before they are sent to the TaxonomyCategoryService server. """ return request, metadata @@ -151,23 +159,23 @@ def post_get_operation( """Post-rpc interceptor for get_operation Override in a subclass to manipulate the response - after it is returned by the AdPartnerService server but before + after it is returned by the TaxonomyCategoryService server but before it is returned to user code. """ return response @dataclasses.dataclass -class AdPartnerServiceRestStub: +class TaxonomyCategoryServiceRestStub: _session: AuthorizedSession _host: str - _interceptor: AdPartnerServiceRestInterceptor + _interceptor: TaxonomyCategoryServiceRestInterceptor -class AdPartnerServiceRestTransport(AdPartnerServiceTransport): - """REST backend transport for AdPartnerService. +class TaxonomyCategoryServiceRestTransport(TaxonomyCategoryServiceTransport): + """REST backend transport for TaxonomyCategoryService. - Provides methods for handling AdPartner objects. + Provides methods for handling ``TaxonomyCategory`` objects. This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation @@ -189,7 +197,7 @@ def __init__( client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, url_scheme: str = "https", - interceptor: Optional[AdPartnerServiceRestInterceptor] = None, + interceptor: Optional[TaxonomyCategoryServiceRestInterceptor] = None, api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. 
@@ -250,12 +258,12 @@ def __init__( ) if client_cert_source_for_mtls: self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or AdPartnerServiceRestInterceptor() + self._interceptor = interceptor or TaxonomyCategoryServiceRestInterceptor() self._prep_wrapped_messages(client_info) - class _GetAdPartner(AdPartnerServiceRestStub): + class _GetTaxonomyCategory(TaxonomyCategoryServiceRestStub): def __hash__(self): - return hash("GetAdPartner") + return hash("GetTaxonomyCategory") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -269,18 +277,17 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: ad_partner_service.GetAdPartnerRequest, + request: taxonomy_category_service.GetTaxonomyCategoryRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> ad_partner_service.AdPartner: - r"""Call the get ad partner method over HTTP. + ) -> taxonomy_category_messages.TaxonomyCategory: + r"""Call the get taxonomy category method over HTTP. Args: - request (~.ad_partner_service.GetAdPartnerRequest): - The request object. Request object for GetAdPartner - method. + request (~.taxonomy_category_service.GetTaxonomyCategoryRequest): + The request object. Request object for ``GetTaxonomyCategory`` method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -288,18 +295,22 @@ def __call__( sent along with the request as metadata. Returns: - ~.ad_partner_service.AdPartner: - The AdPartner resource. + ~.taxonomy_category_messages.TaxonomyCategory: + The ``TaxonomyCategory`` resource. 
""" http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{name=networks/*/adPartners/*}", + "uri": "/v1/{name=networks/*/taxonomyCategories/*}", }, ] - request, metadata = self._interceptor.pre_get_ad_partner(request, metadata) - pb_request = ad_partner_service.GetAdPartnerRequest.pb(request) + request, metadata = self._interceptor.pre_get_taxonomy_category( + request, metadata + ) + pb_request = taxonomy_category_service.GetTaxonomyCategoryRequest.pb( + request + ) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -332,16 +343,16 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = ad_partner_service.AdPartner() - pb_resp = ad_partner_service.AdPartner.pb(resp) + resp = taxonomy_category_messages.TaxonomyCategory() + pb_resp = taxonomy_category_messages.TaxonomyCategory.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_ad_partner(resp) + resp = self._interceptor.post_get_taxonomy_category(resp) return resp - class _ListAdPartners(AdPartnerServiceRestStub): + class _ListTaxonomyCategories(TaxonomyCategoryServiceRestStub): def __hash__(self): - return hash("ListAdPartners") + return hash("ListTaxonomyCategories") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @@ -355,18 +366,17 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: ad_partner_service.ListAdPartnersRequest, + request: taxonomy_category_service.ListTaxonomyCategoriesRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> ad_partner_service.ListAdPartnersResponse: - r"""Call the list ad partners method over HTTP. + ) -> taxonomy_category_service.ListTaxonomyCategoriesResponse: + r"""Call the list taxonomy categories method over HTTP. 
Args: - request (~.ad_partner_service.ListAdPartnersRequest): - The request object. Request object for ListAdPartners - method. + request (~.taxonomy_category_service.ListTaxonomyCategoriesRequest): + The request object. Request object for ``ListTaxonomyCategories`` method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -374,23 +384,24 @@ def __call__( sent along with the request as metadata. Returns: - ~.ad_partner_service.ListAdPartnersResponse: - Response object for - ListAdPartnersRequest containing - matching AdPartner resources. + ~.taxonomy_category_service.ListTaxonomyCategoriesResponse: + Response object for ``ListTaxonomyCategoriesRequest`` + containing matching ``TaxonomyCategory`` objects. """ http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{parent=networks/*}/adPartners", + "uri": "/v1/{parent=networks/*}/taxonomyCategories", }, ] - request, metadata = self._interceptor.pre_list_ad_partners( + request, metadata = self._interceptor.pre_list_taxonomy_categories( request, metadata ) - pb_request = ad_partner_service.ListAdPartnersRequest.pb(request) + pb_request = taxonomy_category_service.ListTaxonomyCategoriesRequest.pb( + request + ) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request["uri"] @@ -423,39 +434,40 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = ad_partner_service.ListAdPartnersResponse() - pb_resp = ad_partner_service.ListAdPartnersResponse.pb(resp) + resp = taxonomy_category_service.ListTaxonomyCategoriesResponse() + pb_resp = taxonomy_category_service.ListTaxonomyCategoriesResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_ad_partners(resp) + resp = self._interceptor.post_list_taxonomy_categories(resp) return resp @property - def get_ad_partner( + def 
get_taxonomy_category( self, ) -> Callable[ - [ad_partner_service.GetAdPartnerRequest], ad_partner_service.AdPartner + [taxonomy_category_service.GetTaxonomyCategoryRequest], + taxonomy_category_messages.TaxonomyCategory, ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GetAdPartner(self._session, self._host, self._interceptor) # type: ignore + return self._GetTaxonomyCategory(self._session, self._host, self._interceptor) # type: ignore @property - def list_ad_partners( + def list_taxonomy_categories( self, ) -> Callable[ - [ad_partner_service.ListAdPartnersRequest], - ad_partner_service.ListAdPartnersResponse, + [taxonomy_category_service.ListTaxonomyCategoriesRequest], + taxonomy_category_service.ListTaxonomyCategoriesResponse, ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListAdPartners(self._session, self._host, self._interceptor) # type: ignore + return self._ListTaxonomyCategories(self._session, self._host, self._interceptor) # type: ignore @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - class _GetOperation(AdPartnerServiceRestStub): + class _GetOperation(TaxonomyCategoryServiceRestStub): def __call__( self, request: operations_pb2.GetOperationRequest, @@ -482,11 +494,11 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/exports/*}", + "uri": "/v1/{name=networks/*/operations/reports/runs/*}", }, { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/runs/*}", + "uri": "/v1/{name=networks/*/operations/reports/exports/*}", }, ] @@ -529,4 +541,4 @@ def close(self): self._session.close() -__all__ = ("AdPartnerServiceRestTransport",) +__all__ = 
("TaxonomyCategoryServiceRestTransport",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/team_service/pagers.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/team_service/pagers.py deleted file mode 100644 index 6b5460c35a1a..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/team_service/pagers.py +++ /dev/null @@ -1,115 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import ( - Any, - AsyncIterator, - Awaitable, - Callable, - Iterator, - Optional, - Sequence, - Tuple, - Union, -) - -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[ - retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None - ] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.ads.admanager_v1.types import team_service - - -class ListTeamsPager: - """A pager for iterating through ``list_teams`` requests. 
- - This class thinly wraps an initial - :class:`google.ads.admanager_v1.types.ListTeamsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``teams`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListTeams`` requests and continue to iterate - through the ``teams`` field on the - corresponding responses. - - All the usual :class:`google.ads.admanager_v1.types.ListTeamsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - - def __init__( - self, - method: Callable[..., team_service.ListTeamsResponse], - request: team_service.ListTeamsRequest, - response: team_service.ListTeamsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.ads.admanager_v1.types.ListTeamsRequest): - The initial request object. - response (google.ads.admanager_v1.types.ListTeamsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = team_service.ListTeamsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[team_service.ListTeamsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __iter__(self) -> Iterator[team_service.Team]: - for page in self.pages: - yield from page.teams - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/client.py index 9e45779e3a82..f0307b68ac1b 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/client.py @@ -50,8 +50,7 @@ from google.longrunning import operations_pb2 # type: ignore -from google.ads.admanager_v1.services.user_service import pagers -from google.ads.admanager_v1.types import user_service +from google.ads.admanager_v1.types import user_messages, user_service from .transports.base import DEFAULT_CLIENT_INFO, UserServiceTransport from .transports.rest import UserServiceRestTransport @@ -179,21 +178,6 @@ def transport(self) -> UserServiceTransport: """ return self._transport - @staticmethod - def network_path( - network_code: str, - ) -> str: - """Returns a fully-qualified network string.""" - return "networks/{network_code}".format( - network_code=network_code, - ) - - @staticmethod - def parse_network_path(path: str) -> Dict[str, str]: 
- """Parses a network path into its component segments.""" - m = re.match(r"^networks/(?P.+?)$", path) - return m.groupdict() if m else {} - @staticmethod def role_path( network_code: str, @@ -705,9 +689,12 @@ def get_user( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> user_service.User: + ) -> user_messages.User: r"""API to retrieve a User object. + To get the current user, the resource name + ``networks/{networkCode}/users/me`` can be used. + .. code-block:: python # This snippet has been automatically generated and should be regarded as a @@ -797,123 +784,6 @@ def sample_get_user(): # Done; return the response. return response - def list_users( - self, - request: Optional[Union[user_service.ListUsersRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListUsersPager: - r"""API to retrieve a list of User objects. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.ads import admanager_v1 - - def sample_list_users(): - # Create a client - client = admanager_v1.UserServiceClient() - - # Initialize request argument(s) - request = admanager_v1.ListUsersRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_users(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.ads.admanager_v1.types.ListUsersRequest, dict]): - The request object. Request object for ListUsers method. - parent (str): - Required. The parent, which owns this collection of - Users. Format: ``networks/{network_code}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.ads.admanager_v1.services.user_service.pagers.ListUsersPager: - Response object for ListUsersRequest - containing matching User resources. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
- if not isinstance(request, user_service.ListUsersRequest): - request = user_service.ListUsersRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_users] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListUsersPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - def __enter__(self) -> "UserServiceClient": return self diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/pagers.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/pagers.py deleted file mode 100644 index 65a2fe539685..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/pagers.py +++ /dev/null @@ -1,115 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import ( - Any, - AsyncIterator, - Awaitable, - Callable, - Iterator, - Optional, - Sequence, - Tuple, - Union, -) - -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[ - retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None - ] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.ads.admanager_v1.types import user_service - - -class ListUsersPager: - """A pager for iterating through ``list_users`` requests. - - This class thinly wraps an initial - :class:`google.ads.admanager_v1.types.ListUsersResponse` object, and - provides an ``__iter__`` method to iterate through its - ``users`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListUsers`` requests and continue to iterate - through the ``users`` field on the - corresponding responses. - - All the usual :class:`google.ads.admanager_v1.types.ListUsersResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - - def __init__( - self, - method: Callable[..., user_service.ListUsersResponse], - request: user_service.ListUsersRequest, - response: user_service.ListUsersResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.ads.admanager_v1.types.ListUsersRequest): - The initial request object. - response (google.ads.admanager_v1.types.ListUsersResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = user_service.ListUsersRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[user_service.ListUsersResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __iter__(self) -> Iterator[user_service.User]: - for page in self.pages: - yield from page.users - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/transports/base.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/transports/base.py index 1212bbbf1c14..bc4ef1797f43 100644 --- 
a/packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/transports/base.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/transports/base.py @@ -26,7 +26,7 @@ from google.oauth2 import service_account # type: ignore from google.ads.admanager_v1 import gapic_version as package_version -from google.ads.admanager_v1.types import user_service +from google.ads.admanager_v1.types import user_messages, user_service DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -134,11 +134,6 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), - self.list_users: gapic_v1.method.wrap_method( - self.list_users, - default_timeout=None, - client_info=client_info, - ), } def close(self): @@ -155,18 +150,7 @@ def get_user( self, ) -> Callable[ [user_service.GetUserRequest], - Union[user_service.User, Awaitable[user_service.User]], - ]: - raise NotImplementedError() - - @property - def list_users( - self, - ) -> Callable[ - [user_service.ListUsersRequest], - Union[ - user_service.ListUsersResponse, Awaitable[user_service.ListUsersResponse] - ], + Union[user_messages.User, Awaitable[user_messages.User]], ]: raise NotImplementedError() diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/transports/rest.py index 616531e7d102..7ec694c9237b 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/transports/rest.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/transports/rest.py @@ -38,7 +38,7 @@ from google.longrunning import operations_pb2 # type: ignore -from google.ads.admanager_v1.types import user_service +from google.ads.admanager_v1.types import user_messages, user_service from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO 
from .base import UserServiceTransport @@ -73,14 +73,6 @@ def post_get_user(self, response): logging.log(f"Received response: {response}") return response - def pre_list_users(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_users(self, response): - logging.log(f"Received response: {response}") - return response - transport = UserServiceRestTransport(interceptor=MyCustomUserServiceInterceptor()) client = UserServiceClient(transport=transport) @@ -97,7 +89,7 @@ def pre_get_user( """ return request, metadata - def post_get_user(self, response: user_service.User) -> user_service.User: + def post_get_user(self, response: user_messages.User) -> user_messages.User: """Post-rpc interceptor for get_user Override in a subclass to manipulate the response @@ -106,29 +98,6 @@ def post_get_user(self, response: user_service.User) -> user_service.User: """ return response - def pre_list_users( - self, - request: user_service.ListUsersRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[user_service.ListUsersRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_users - - Override in a subclass to manipulate the request or metadata - before they are sent to the UserService server. - """ - return request, metadata - - def post_list_users( - self, response: user_service.ListUsersResponse - ) -> user_service.ListUsersResponse: - """Post-rpc interceptor for list_users - - Override in a subclass to manipulate the response - after it is returned by the UserService server but before - it is returned to user code. - """ - return response - def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -270,7 +239,7 @@ def __call__( retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> user_service.User: + ) -> user_messages.User: r"""Call the get user method over HTTP. 
Args: @@ -283,7 +252,7 @@ def __call__( sent along with the request as metadata. Returns: - ~.user_service.User: + ~.user_messages.User: The User resource. """ @@ -327,114 +296,19 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = user_service.User() - pb_resp = user_service.User.pb(resp) + resp = user_messages.User() + pb_resp = user_messages.User.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_user(resp) return resp - class _ListUsers(UserServiceRestStub): - def __hash__(self): - return hash("ListUsers") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } - - def __call__( - self, - request: user_service.ListUsersRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> user_service.ListUsersResponse: - r"""Call the list users method over HTTP. - - Args: - request (~.user_service.ListUsersRequest): - The request object. Request object for ListUsers method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.user_service.ListUsersResponse: - Response object for ListUsersRequest - containing matching User resources. 
- - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{parent=networks/*}/users", - }, - ] - request, metadata = self._interceptor.pre_list_users(request, metadata) - pb_request = user_service.ListUsersRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) - ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = user_service.ListUsersResponse() - pb_resp = user_service.ListUsersResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_users(resp) - return resp - @property - def get_user(self) -> Callable[[user_service.GetUserRequest], user_service.User]: + def get_user(self) -> Callable[[user_service.GetUserRequest], user_messages.User]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast return self._GetUser(self._session, self._host, self._interceptor) # type: ignore - @property - def list_users( - self, - ) -> Callable[[user_service.ListUsersRequest], user_service.ListUsersResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListUsers(self._session, self._host, self._interceptor) # type: ignore - @property def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore @@ -466,11 +340,11 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/exports/*}", + "uri": "/v1/{name=networks/*/operations/reports/runs/*}", }, { "method": "get", - "uri": "/v1/{name=networks/*/operations/reports/runs/*}", + "uri": "/v1/{name=networks/*/operations/reports/exports/*}", }, ] diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/__init__.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/__init__.py index 36867842d446..79767e5a1d19 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/__init__.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/__init__.py @@ -13,69 +13,46 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from .ad_partner_declaration import AdPartnerDeclaration, DeclarationTypeEnum -from .ad_partner_service import ( - AdPartner, - GetAdPartnerRequest, - ListAdPartnersRequest, - ListAdPartnersResponse, -) -from .ad_unit_enums import AppliedAdsenseEnabledEnum +from .ad_unit_enums import AdUnitStatusEnum, SmartSizeModeEnum, TargetWindowEnum +from .ad_unit_messages import AdUnit, AdUnitParent, AdUnitSize, LabelFrequencyCap from .ad_unit_service import ( - AdUnit, - AdUnitParent, GetAdUnitRequest, - LabelFrequencyCap, + ListAdUnitSizesRequest, + ListAdUnitSizesResponse, ListAdUnitsRequest, ListAdUnitsResponse, - SmartSizeModeEnum, - TargetWindowEnum, ) -from .ad_unit_size import AdUnitSize from .admanager_error import AdManagerError from .applied_label import AppliedLabel from .company_credit_status_enum import CompanyCreditStatusEnum +from .company_messages import Company from .company_service import ( - Company, GetCompanyRequest, ListCompaniesRequest, ListCompaniesResponse, ) from .company_type_enum import CompanyTypeEnum -from .computed_status_enum import ComputedStatusEnum -from .contact_service import ( - Contact, - GetContactRequest, - ListContactsRequest, - ListContactsResponse, -) -from .creative_placeholder import CreativePlaceholder -from .creative_service import ( - Creative, - GetCreativeRequest, - ListCreativesRequest, - ListCreativesResponse, -) +from .contact_messages import Contact from .custom_field_enums import ( CustomFieldDataTypeEnum, CustomFieldEntityTypeEnum, CustomFieldStatusEnum, CustomFieldVisibilityEnum, ) +from .custom_field_messages import CustomField, CustomFieldOption from .custom_field_service import ( - CustomField, - CustomFieldOption, GetCustomFieldRequest, ListCustomFieldsRequest, ListCustomFieldsResponse, ) +from .custom_field_value import CustomFieldValue from .custom_targeting_key_enums import ( CustomTargetingKeyReportableTypeEnum, CustomTargetingKeyStatusEnum, CustomTargetingKeyTypeEnum, ) +from .custom_targeting_key_messages 
import CustomTargetingKey from .custom_targeting_key_service import ( - CustomTargetingKey, GetCustomTargetingKeyRequest, ListCustomTargetingKeysRequest, ListCustomTargetingKeysResponse, @@ -84,67 +61,89 @@ CustomTargetingValueMatchTypeEnum, CustomTargetingValueStatusEnum, ) +from .custom_targeting_value_messages import CustomTargetingValue from .custom_targeting_value_service import ( - CustomTargetingValue, GetCustomTargetingValueRequest, ListCustomTargetingValuesRequest, ListCustomTargetingValuesResponse, ) -from .environment_type_enum import EnvironmentTypeEnum -from .frequency_cap import FrequencyCap, TimeUnitEnum -from .goal import Goal, GoalTypeEnum, UnitTypeEnum -from .label_service import GetLabelRequest, Label, ListLabelsRequest, ListLabelsResponse -from .line_item_enums import ( - CreativeRotationTypeEnum, - DeliveryRateTypeEnum, - LineItemCostTypeEnum, - LineItemDiscountTypeEnum, - LineItemTypeEnum, - ReservationStatusEnum, +from .entity_signals_mapping_messages import EntitySignalsMapping +from .entity_signals_mapping_service import ( + BatchCreateEntitySignalsMappingsRequest, + BatchCreateEntitySignalsMappingsResponse, + BatchUpdateEntitySignalsMappingsRequest, + BatchUpdateEntitySignalsMappingsResponse, + CreateEntitySignalsMappingRequest, + GetEntitySignalsMappingRequest, + ListEntitySignalsMappingsRequest, + ListEntitySignalsMappingsResponse, + UpdateEntitySignalsMappingRequest, ) -from .line_item_service import ( - GetLineItemRequest, - LineItem, - ListLineItemsRequest, - ListLineItemsResponse, -) -from .network_service import GetNetworkRequest, Network -from .order_service import GetOrderRequest, ListOrdersRequest, ListOrdersResponse, Order +from .environment_type_enum import EnvironmentTypeEnum +from .frequency_cap import FrequencyCap +from .label_messages import Label +from .network_messages import Network +from .network_service import ( + GetNetworkRequest, + ListNetworksRequest, + ListNetworksResponse, +) +from .order_enums import 
OrderStatusEnum +from .order_messages import Order +from .order_service import GetOrderRequest, ListOrdersRequest, ListOrdersResponse from .placement_enums import PlacementStatusEnum +from .placement_messages import Placement from .placement_service import ( GetPlacementRequest, ListPlacementsRequest, ListPlacementsResponse, - Placement, ) from .report_service import ( - ExportSavedReportMetadata, - ExportSavedReportRequest, - ExportSavedReportResponse, + CreateReportRequest, + FetchReportResultRowsRequest, + FetchReportResultRowsResponse, + GetReportRequest, + ListReportsRequest, + ListReportsResponse, Report, -) -from .role_service import GetRoleRequest, ListRolesRequest, ListRolesResponse, Role -from .size import Size, SizeTypeEnum -from .team_service import GetTeamRequest, ListTeamsRequest, ListTeamsResponse, Team -from .user_service import GetUserRequest, ListUsersRequest, ListUsersResponse, User + ReportDefinition, + RunReportMetadata, + RunReportRequest, + RunReportResponse, + Schedule, + ScheduleOptions, + UpdateReportRequest, +) +from .role_enums import RoleStatusEnum +from .role_messages import Role +from .role_service import GetRoleRequest, ListRolesRequest, ListRolesResponse +from .size import Size +from .size_type_enum import SizeTypeEnum +from .taxonomy_category_messages import TaxonomyCategory +from .taxonomy_category_service import ( + GetTaxonomyCategoryRequest, + ListTaxonomyCategoriesRequest, + ListTaxonomyCategoriesResponse, +) +from .taxonomy_type_enum import TaxonomyTypeEnum +from .team_messages import Team +from .time_unit_enum import TimeUnitEnum +from .user_messages import User +from .user_service import GetUserRequest __all__ = ( - "AdPartnerDeclaration", - "DeclarationTypeEnum", - "AdPartner", - "GetAdPartnerRequest", - "ListAdPartnersRequest", - "ListAdPartnersResponse", - "AppliedAdsenseEnabledEnum", + "AdUnitStatusEnum", + "SmartSizeModeEnum", + "TargetWindowEnum", "AdUnit", "AdUnitParent", - "GetAdUnitRequest", + "AdUnitSize", 
"LabelFrequencyCap", + "GetAdUnitRequest", + "ListAdUnitSizesRequest", + "ListAdUnitSizesResponse", "ListAdUnitsRequest", "ListAdUnitsResponse", - "SmartSizeModeEnum", - "TargetWindowEnum", - "AdUnitSize", "AdManagerError", "AppliedLabel", "CompanyCreditStatusEnum", @@ -153,16 +152,7 @@ "ListCompaniesRequest", "ListCompaniesResponse", "CompanyTypeEnum", - "ComputedStatusEnum", "Contact", - "GetContactRequest", - "ListContactsRequest", - "ListContactsResponse", - "CreativePlaceholder", - "Creative", - "GetCreativeRequest", - "ListCreativesRequest", - "ListCreativesResponse", "CustomFieldDataTypeEnum", "CustomFieldEntityTypeEnum", "CustomFieldStatusEnum", @@ -172,6 +162,7 @@ "GetCustomFieldRequest", "ListCustomFieldsRequest", "ListCustomFieldsResponse", + "CustomFieldValue", "CustomTargetingKeyReportableTypeEnum", "CustomTargetingKeyStatusEnum", "CustomTargetingKeyTypeEnum", @@ -185,53 +176,61 @@ "GetCustomTargetingValueRequest", "ListCustomTargetingValuesRequest", "ListCustomTargetingValuesResponse", + "EntitySignalsMapping", + "BatchCreateEntitySignalsMappingsRequest", + "BatchCreateEntitySignalsMappingsResponse", + "BatchUpdateEntitySignalsMappingsRequest", + "BatchUpdateEntitySignalsMappingsResponse", + "CreateEntitySignalsMappingRequest", + "GetEntitySignalsMappingRequest", + "ListEntitySignalsMappingsRequest", + "ListEntitySignalsMappingsResponse", + "UpdateEntitySignalsMappingRequest", "EnvironmentTypeEnum", "FrequencyCap", - "TimeUnitEnum", - "Goal", - "GoalTypeEnum", - "UnitTypeEnum", - "GetLabelRequest", "Label", - "ListLabelsRequest", - "ListLabelsResponse", - "CreativeRotationTypeEnum", - "DeliveryRateTypeEnum", - "LineItemCostTypeEnum", - "LineItemDiscountTypeEnum", - "LineItemTypeEnum", - "ReservationStatusEnum", - "GetLineItemRequest", - "LineItem", - "ListLineItemsRequest", - "ListLineItemsResponse", - "GetNetworkRequest", "Network", + "GetNetworkRequest", + "ListNetworksRequest", + "ListNetworksResponse", + "OrderStatusEnum", + "Order", 
"GetOrderRequest", "ListOrdersRequest", "ListOrdersResponse", - "Order", "PlacementStatusEnum", + "Placement", "GetPlacementRequest", "ListPlacementsRequest", "ListPlacementsResponse", - "Placement", - "ExportSavedReportMetadata", - "ExportSavedReportRequest", - "ExportSavedReportResponse", + "CreateReportRequest", + "FetchReportResultRowsRequest", + "FetchReportResultRowsResponse", + "GetReportRequest", + "ListReportsRequest", + "ListReportsResponse", "Report", + "ReportDefinition", + "RunReportMetadata", + "RunReportRequest", + "RunReportResponse", + "Schedule", + "ScheduleOptions", + "UpdateReportRequest", + "RoleStatusEnum", + "Role", "GetRoleRequest", "ListRolesRequest", "ListRolesResponse", - "Role", "Size", "SizeTypeEnum", - "GetTeamRequest", - "ListTeamsRequest", - "ListTeamsResponse", + "TaxonomyCategory", + "GetTaxonomyCategoryRequest", + "ListTaxonomyCategoriesRequest", + "ListTaxonomyCategoriesResponse", + "TaxonomyTypeEnum", "Team", - "GetUserRequest", - "ListUsersRequest", - "ListUsersResponse", + "TimeUnitEnum", "User", + "GetUserRequest", ) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/ad_partner_declaration.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/ad_partner_declaration.py deleted file mode 100644 index 23d969b2e272..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/ad_partner_declaration.py +++ /dev/null @@ -1,80 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -__protobuf__ = proto.module( - package="google.ads.admanager.v1", - manifest={ - "AdPartnerDeclaration", - "DeclarationTypeEnum", - }, -) - - -class AdPartnerDeclaration(proto.Message): - r"""Represents a set of declarations about what (if any) ad - partners are associated with a given creative. This can be set - at the network level, as a default for all creatives, or - overridden for a particular creative. - - Attributes: - type_ (google.ads.admanager_v1.types.DeclarationTypeEnum.DeclarationType): - They type of declaration. - ad_partners (MutableSequence[str]): - The resource names of AdPartners being declared. Format: - "networks/{network_code}/adPartners/{ad_partner_id}". - """ - - type_: "DeclarationTypeEnum.DeclarationType" = proto.Field( - proto.ENUM, - number=1, - enum="DeclarationTypeEnum.DeclarationType", - ) - ad_partners: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - - -class DeclarationTypeEnum(proto.Message): - r"""Wrapper message for - [DeclarationTypeEnum][google.ads.admanager.v1.DeclarationTypeEnum]. - - """ - - class DeclarationType(proto.Enum): - r"""The declaration about third party data usage on the - associated entity. - - Values: - DECLARATION_TYPE_UNSPECIFIED (0): - Default value. This value is unused. - NONE (1): - No ad technology providers to declare. - DECLARED (2): - There are are ad technology providers to - declare on this entity. 
- """ - DECLARATION_TYPE_UNSPECIFIED = 0 - NONE = 1 - DECLARED = 2 - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/ad_unit_enums.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/ad_unit_enums.py index 31ddc4b2a8fe..6ab55de981f0 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/ad_unit_enums.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/ad_unit_enums.py @@ -22,36 +22,91 @@ __protobuf__ = proto.module( package="google.ads.admanager.v1", manifest={ - "AppliedAdsenseEnabledEnum", + "AdUnitStatusEnum", + "SmartSizeModeEnum", + "TargetWindowEnum", }, ) -class AppliedAdsenseEnabledEnum(proto.Message): +class AdUnitStatusEnum(proto.Message): r"""Wrapper message for - [AppliedAdsenseEnabled][google.ads.admanager.v1.AppliedAdsenseEnabledEnum.AppliedAdsenseEnabled] + [AdUnitStatus][google.ads.admanager.v1.AdUnitStatusEnum.AdUnitStatus] """ - class AppliedAdsenseEnabled(proto.Enum): - r"""Specifies if serving ads from the AdSense content network is - enabled. + class AdUnitStatus(proto.Enum): + r"""The status of an AdUnit. Values: - APPLIED_ADSENSE_ENABLED_UNSPECIFIED (0): - No adsense enabled setting applied directly; - value will be inherited from parent or system - default. - TRUE (1): - Serving ads from AdSense content network is - enabled. - FALSE (2): - Serving ads from AdSense content network is - disabled. + AD_UNIT_STATUS_UNSPECIFIED (0): + Default value. This value is unused. + ACTIVE (1): + The ad unit is active, available for + targeting, and serving. + INACTIVE (2): + The ad unit will be visible in the UI, but + ignored by serving. + ARCHIVED (3): + The ad unit will be hidden in the UI and + ignored by serving. 
""" - APPLIED_ADSENSE_ENABLED_UNSPECIFIED = 0 - TRUE = 1 - FALSE = 2 + AD_UNIT_STATUS_UNSPECIFIED = 0 + ACTIVE = 1 + INACTIVE = 2 + ARCHIVED = 3 + + +class SmartSizeModeEnum(proto.Message): + r"""Wrapper message for + [SmartSizeMode][google.ads.admanager.v1.SmartSizeModeEnum.SmartSizeMode]. + + """ + + class SmartSizeMode(proto.Enum): + r"""The smart size mode for this ad unit. This attribute is + optional and defaults to SmartSizeMode.NONE for fixed sizes. + + Values: + SMART_SIZE_MODE_UNSPECIFIED (0): + Default value. This value is unused. + NONE (1): + Fixed size mode (default). + SMART_BANNER (2): + The height is fixed for the request, the + width is a range. + DYNAMIC_SIZE (3): + Height and width are ranges. + """ + SMART_SIZE_MODE_UNSPECIFIED = 0 + NONE = 1 + SMART_BANNER = 2 + DYNAMIC_SIZE = 3 + + +class TargetWindowEnum(proto.Message): + r"""Wrapper message for + [TargetWindow][google.ads.admanager.v1.TargetWindowEnum.TargetWindow]. + + """ + + class TargetWindow(proto.Enum): + r"""Corresponds to an HTML link's target attribute. + See http://www.w3.org/TR/html401/present/frames.html#adef-target + + Values: + TARGET_WINDOW_UNSPECIFIED (0): + Default value. This value is unused. + TOP (1): + Specifies that the link should open in the + full body of the page. + BLANK (2): + Specifies that the link should open in a new + window. 
+ """ + TARGET_WINDOW_UNSPECIFIED = 0 + TOP = 1 + BLANK = 2 __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/ad_unit_messages.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/ad_unit_messages.py new file mode 100644 index 000000000000..85d82af952a2 --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/ad_unit_messages.py @@ -0,0 +1,369 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.ads.admanager_v1.types import ( + ad_unit_enums, + applied_label, + environment_type_enum, +) +from google.ads.admanager_v1.types import frequency_cap as gaa_frequency_cap +from google.ads.admanager_v1.types import size as gaa_size + +__protobuf__ = proto.module( + package="google.ads.admanager.v1", + manifest={ + "AdUnit", + "AdUnitSize", + "AdUnitParent", + "LabelFrequencyCap", + }, +) + + +class AdUnit(proto.Message): + r"""The AdUnit resource. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Identifier. The resource name of the AdUnit. 
Format: + ``networks/{network_code}/adUnits/{ad_unit_id}`` + ad_unit_id (int): + Output only. AdUnit ID. + parent_ad_unit (str): + Required. Immutable. The AdUnit's parent. Every ad unit has + a parent except for the root ad unit, which is created by + Google. Format: + "networks/{network_code}/adUnits/{ad_unit_id}". + parent_path (MutableSequence[google.ads.admanager_v1.types.AdUnitParent]): + Output only. The path to this AdUnit in the + ad unit hierarchy represented as a list from the + root to this ad unit's parent. For root ad + units, this list is empty. + display_name (str): + Required. The display name of the ad unit. + Its maximum length is 255 characters. + ad_unit_code (str): + Immutable. A string used to uniquely identify + the ad unit for the purposes of serving the ad. + This attribute is optional and can be set during + ad unit creation. If it is not provided, it will + be assigned by Google based on the ad unit ID. + status (google.ads.admanager_v1.types.AdUnitStatusEnum.AdUnitStatus): + Output only. The status of this ad unit. It + defaults to ACTIVE. + applied_target_window (google.ads.admanager_v1.types.TargetWindowEnum.TargetWindow): + Optional. The target window directly applied + to this AdUnit. If this field is not set, this + AdUnit uses the target window specified in + effectiveTargetWindow. + effective_target_window (google.ads.admanager_v1.types.TargetWindowEnum.TargetWindow): + Output only. Non-empty default. The target + window of this AdUnit. This value is inherited + from ancestor AdUnits and defaults to TOP if no + AdUnit in the hierarchy specifies it. + applied_teams (MutableSequence[str]): + Optional. The resource names of Teams directly applied to + this AdUnit. Format: + "networks/{network_code}/teams/{team_id}". + teams (MutableSequence[str]): + Output only. The resource names of all Teams that this + AdUnit is on as well as those inherited from parent AdUnits. + Format: "networks/{network_code}/teams/{team_id}". 
+ description (str): + Optional. A description of the ad unit. The + maximum length is 65,535 characters. + explicitly_targeted (bool): + Optional. If this field is set to true, then + the AdUnit will not be implicitly targeted when + its parent is. Traffickers must explicitly + target such an AdUnit or else no line items will + serve to it. This feature is only available for + Ad Manager 360 accounts. + has_children (bool): + Output only. This field is set to true if the + ad unit has any children. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time this AdUnit was last + modified. + ad_unit_sizes (MutableSequence[google.ads.admanager_v1.types.AdUnitSize]): + Optional. The sizes that can be served inside + this ad unit. + external_set_top_box_channel_id (str): + Optional. Determines what set top box video + on demand channel this ad unit corresponds to in + an external set top box ad campaign system. + refresh_delay (google.protobuf.duration_pb2.Duration): + Optional. The duration after which an Ad Unit + will automatically refresh. This is only valid + for ad units in mobile apps. If not set, the ad + unit will not refresh. + applied_labels (MutableSequence[google.ads.admanager_v1.types.AppliedLabel]): + Optional. The set of labels applied directly + to this ad unit. + effective_applied_labels (MutableSequence[google.ads.admanager_v1.types.AppliedLabel]): + Output only. Contains the set of labels + applied directly to the ad unit as well as those + inherited from the parent ad units. If a label + has been negated, only the negated label is + returned. This field is readonly and is assigned + by Google. + applied_label_frequency_caps (MutableSequence[google.ads.admanager_v1.types.LabelFrequencyCap]): + Optional. The set of label frequency caps + applied directly to this ad unit. There is a + limit of 10 label frequency caps per ad unit. 
+ effective_label_frequency_caps (MutableSequence[google.ads.admanager_v1.types.LabelFrequencyCap]): + Output only. The label frequency caps applied + directly to the ad unit as well as those + inherited from parent ad units. + smart_size_mode (google.ads.admanager_v1.types.SmartSizeModeEnum.SmartSizeMode): + Optional. The smart size mode for this ad + unit. This attribute is optional and defaults to + SmartSizeMode.NONE for fixed sizes. + applied_adsense_enabled (bool): + Optional. The value of AdSense enabled + directly applied to this ad unit. This attribute + is optional and if not specified this ad unit + will inherit the value of + effectiveAdsenseEnabled from its ancestors. + + This field is a member of `oneof`_ ``_applied_adsense_enabled``. + effective_adsense_enabled (bool): + Output only. Specifies whether or not the + AdUnit is enabled for serving ads from the + AdSense content network. This attribute defaults + to the ad unit's parent or ancestor's setting if + one has been set. If no ancestor of the ad unit + has set appliedAdsenseEnabled, the attribute is + defaulted to true. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + ad_unit_id: int = proto.Field( + proto.INT64, + number=15, + ) + parent_ad_unit: str = proto.Field( + proto.STRING, + number=10, + ) + parent_path: MutableSequence["AdUnitParent"] = proto.RepeatedField( + proto.MESSAGE, + number=11, + message="AdUnitParent", + ) + display_name: str = proto.Field( + proto.STRING, + number=9, + ) + ad_unit_code: str = proto.Field( + proto.STRING, + number=2, + ) + status: ad_unit_enums.AdUnitStatusEnum.AdUnitStatus = proto.Field( + proto.ENUM, + number=13, + enum=ad_unit_enums.AdUnitStatusEnum.AdUnitStatus, + ) + applied_target_window: ad_unit_enums.TargetWindowEnum.TargetWindow = proto.Field( + proto.ENUM, + number=44, + enum=ad_unit_enums.TargetWindowEnum.TargetWindow, + ) + effective_target_window: ad_unit_enums.TargetWindowEnum.TargetWindow = proto.Field( + proto.ENUM, + number=45, + enum=ad_unit_enums.TargetWindowEnum.TargetWindow, + ) + applied_teams: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + teams: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + description: str = proto.Field( + proto.STRING, + number=5, + ) + explicitly_targeted: bool = proto.Field( + proto.BOOL, + number=6, + ) + has_children: bool = proto.Field( + proto.BOOL, + number=7, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + ad_unit_sizes: MutableSequence["AdUnitSize"] = proto.RepeatedField( + proto.MESSAGE, + number=14, + message="AdUnitSize", + ) + external_set_top_box_channel_id: str = proto.Field( + proto.STRING, + number=17, + ) + refresh_delay: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=19, + message=duration_pb2.Duration, + ) + applied_labels: MutableSequence[applied_label.AppliedLabel] = proto.RepeatedField( + proto.MESSAGE, + number=21, + message=applied_label.AppliedLabel, + ) + effective_applied_labels: MutableSequence[ + 
applied_label.AppliedLabel + ] = proto.RepeatedField( + proto.MESSAGE, + number=22, + message=applied_label.AppliedLabel, + ) + applied_label_frequency_caps: MutableSequence[ + "LabelFrequencyCap" + ] = proto.RepeatedField( + proto.MESSAGE, + number=23, + message="LabelFrequencyCap", + ) + effective_label_frequency_caps: MutableSequence[ + "LabelFrequencyCap" + ] = proto.RepeatedField( + proto.MESSAGE, + number=24, + message="LabelFrequencyCap", + ) + smart_size_mode: ad_unit_enums.SmartSizeModeEnum.SmartSizeMode = proto.Field( + proto.ENUM, + number=25, + enum=ad_unit_enums.SmartSizeModeEnum.SmartSizeMode, + ) + applied_adsense_enabled: bool = proto.Field( + proto.BOOL, + number=26, + optional=True, + ) + effective_adsense_enabled: bool = proto.Field( + proto.BOOL, + number=27, + ) + + +class AdUnitSize(proto.Message): + r"""Represents the size, environment, and companions of an ad in + an ad unit. + + Attributes: + size (google.ads.admanager_v1.types.Size): + Required. The Size of the AdUnit. + environment_type (google.ads.admanager_v1.types.EnvironmentTypeEnum.EnvironmentType): + Required. The EnvironmentType of the AdUnit + companions (MutableSequence[google.ads.admanager_v1.types.Size]): + The companions for this ad unit size. Companions are only + valid if the environment is + [VIDEO_PLAYER][google.ads.admanager.v1.EnvironmentTypeEnum.EnvironmentType]. + """ + + size: gaa_size.Size = proto.Field( + proto.MESSAGE, + number=1, + message=gaa_size.Size, + ) + environment_type: environment_type_enum.EnvironmentTypeEnum.EnvironmentType = ( + proto.Field( + proto.ENUM, + number=2, + enum=environment_type_enum.EnvironmentTypeEnum.EnvironmentType, + ) + ) + companions: MutableSequence[gaa_size.Size] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=gaa_size.Size, + ) + + +class AdUnitParent(proto.Message): + r"""The summary of a parent AdUnit. + + Attributes: + parent_ad_unit (str): + Output only. 
The parent of the current AdUnit Format: + ``networks/{network_code}/adUnits/{ad_unit_id}`` + display_name (str): + Output only. The display name of the parent + AdUnit. + ad_unit_code (str): + Output only. A string used to uniquely + identify the ad unit for the purposes of serving + the ad. + """ + + parent_ad_unit: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + ad_unit_code: str = proto.Field( + proto.STRING, + number=3, + ) + + +class LabelFrequencyCap(proto.Message): + r"""Frequency cap using a label. + + Attributes: + label (str): + Required. The label to used for frequency capping. Format: + "networks/{network_code}/labels/{label_id}". + frequency_cap (google.ads.admanager_v1.types.FrequencyCap): + The frequency cap. + """ + + label: str = proto.Field( + proto.STRING, + number=1, + ) + frequency_cap: gaa_frequency_cap.FrequencyCap = proto.Field( + proto.MESSAGE, + number=2, + message=gaa_frequency_cap.FrequencyCap, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/ad_unit_service.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/ad_unit_service.py index 1150d9f25c0c..6e87bb669a0e 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/ad_unit_service.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/ad_unit_service.py @@ -17,422 +17,163 @@ from typing import MutableMapping, MutableSequence -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore -from google.ads.admanager_v1.types import ad_unit_enums, ad_unit_size, applied_label -from google.ads.admanager_v1.types import frequency_cap as gaa_frequency_cap +from google.ads.admanager_v1.types import ad_unit_messages __protobuf__ = proto.module( package="google.ads.admanager.v1", manifest={ - "AdUnit", - "AdUnitParent", - 
"TargetWindowEnum", - "LabelFrequencyCap", - "SmartSizeModeEnum", "GetAdUnitRequest", "ListAdUnitsRequest", "ListAdUnitsResponse", + "ListAdUnitSizesRequest", + "ListAdUnitSizesResponse", }, ) -class AdUnit(proto.Message): - r"""The AdUnit resource. +class GetAdUnitRequest(proto.Message): + r"""Request object for GetAdUnit method. Attributes: name (str): - Identifier. The resource name of the AdUnit. Format: + Required. The resource name of the AdUnit. Format: ``networks/{network_code}/adUnits/{ad_unit_id}`` - ad_unit_id (int): - Output only. AdUnit ID. - parent_ad_unit (str): - Required. Immutable. The AdUnit's parent. Every ad unit has - a parent except for the root ad unit, which is created by - Google. Format: - "networks/{network_code}/adUnits/{ad_unit_id}". - parent_path (MutableSequence[google.ads.admanager_v1.types.AdUnitParent]): - Output only. The path to this AdUnit in the - ad unit hierarchy represented as a list from the - root to this ad unit's parent. For root ad - units, this list is empty. - display_name (str): - Required. The display name of the ad unit. - Its maximum length is 255 characters. - ad_unit_code (str): - Immutable. A string used to uniquely identify - the ad unit for the purposes of serving the ad. - This attribute is optional and can be set during - ad unit creation. If it is not provided, it will - be assigned by Google based off of the ad unit - ID. - status (google.ads.admanager_v1.types.AdUnit.Status): - Output only. The status of this ad unit. It - defaults to ACTIVE. - target_window (google.ads.admanager_v1.types.TargetWindowEnum.TargetWindow): - Non-empty default. The value to use for the - HTML link's target attribute. This value will be - interpreted as TOP if left blank. - applied_teams (MutableSequence[str]): - Optional. The resource names of Teams directly applied to - this AdUnit. Format: - "networks/{network_code}/teams/{team_id}". - teams (MutableSequence[str]): - Output only. 
The resource names of all Teams that this - AdUnit is on as well as those inherited from parent AdUnits. - Format: "networks/{network_code}/teams/{team_id}". - description (str): - Optional. A description of the ad unit. The - maximum length is 65,535 characters. - explicitly_targeted (bool): - Optional. If this field is set to true, then - the AdUnit will not be implicitly targeted when - its parent is. Traffickers must explicitly - target such an AdUnit or else no line items will - serve to it. This feature is only available for - Ad Manager 360 accounts. - has_children (bool): - Output only. This field is set to true if the - ad unit has any children. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The instant this AdUnit was last - modified. - ad_unit_sizes (MutableSequence[google.ads.admanager_v1.types.AdUnitSize]): - Optional. The sizes that can be served inside - this ad unit. - external_set_top_box_channel_id (str): - Optional. Determines what set top box video - on demand channel this ad unit corresponds to in - an external set top box ad campaign system. - refresh_delay (google.protobuf.duration_pb2.Duration): - Optional. The duration after which an Ad Unit - will automatically refresh. This is only valid - for ad units in mobile apps. If not set, the ad - unit will not refresh. - ctv_application_id (int): - Optional. The ID of the CTV application that - this ad unit is within. - applied_labels (MutableSequence[google.ads.admanager_v1.types.AppliedLabel]): - Optional. The set of labels applied directly - to this ad unit. - effective_applied_labels (MutableSequence[google.ads.admanager_v1.types.AppliedLabel]): - Output only. Contains the set of labels - applied directly to the ad unit as well as those - inherited from the parent ad units. If a label - has been negated, only the negated label is - returned. This field is readonly and is assigned - by Google. 
- applied_label_frequency_caps (MutableSequence[google.ads.admanager_v1.types.LabelFrequencyCap]): - Optional. The set of label frequency caps - applied directly to this ad unit. There is a - limit of 10 label frequency caps per ad unit. - effective_label_frequency_caps (MutableSequence[google.ads.admanager_v1.types.LabelFrequencyCap]): - Output only. The label frequency caps applied - directly to the ad unit as well as those - inherited from parent ad units. - smart_size_mode (google.ads.admanager_v1.types.SmartSizeModeEnum.SmartSizeMode): - Optional. The smart size mode for this ad - unit. This attribute is optional and defaults to - SmartSizeMode.NONE for fixed sizes. - applied_adsense_enabled (google.ads.admanager_v1.types.AppliedAdsenseEnabledEnum.AppliedAdsenseEnabled): - Optional. The value of AdSense enabled - directly applied to this ad unit. This attribute - is optional and if not specified this ad unit - will inherit the value of - effectiveAdsenseEnabled from its ancestors. - effective_adsense_enabled (bool): - Output only. Specifies whether or not the - AdUnit is enabled for serving ads from the - AdSense content network. This attribute defaults - to the ad unit's parent or ancestor's setting if - one has been set. If no ancestor of the ad unit - has set appliedAdsenseEnabled, the attribute is - defaulted to true. """ - class Status(proto.Enum): - r"""The status of an AdUnit. - - Values: - STATUS_UNSPECIFIED (0): - Default value. This value is unused. - ACTIVE (1): - The ad unit is active, available for - targeting, and serving. - INACTIVE (2): - The ad unit will be visible in the UI, but - ignored by serving. - ARCHIVED (3): - The ad unit will be hidden in the UI and - ignored by serving. 
- """ - STATUS_UNSPECIFIED = 0 - ACTIVE = 1 - INACTIVE = 2 - ARCHIVED = 3 - name: str = proto.Field( proto.STRING, number=1, ) - ad_unit_id: int = proto.Field( - proto.INT64, - number=15, - ) - parent_ad_unit: str = proto.Field( - proto.STRING, - number=10, - ) - parent_path: MutableSequence["AdUnitParent"] = proto.RepeatedField( - proto.MESSAGE, - number=11, - message="AdUnitParent", - ) - display_name: str = proto.Field( + + +class ListAdUnitsRequest(proto.Message): + r"""Request object for ListAdUnits method. + + Attributes: + parent (str): + Required. The parent, which owns this collection of AdUnits. + Format: ``networks/{network_code}`` + page_size (int): + Optional. The maximum number of AdUnits to + return. The service may return fewer than this + value. If unspecified, at most 50 ad units will + be returned. The maximum value is 1000; values + above 1000 will be coerced to 1000. + page_token (str): + Optional. A page token, received from a previous + ``ListAdUnits`` call. Provide this to retrieve the + subsequent page. + + When paginating, all other parameters provided to + ``ListAdUnits`` must match the call that provided the page + token. + filter (str): + Optional. Expression to filter the response. + See syntax details at + https://developers.google.com/ad-manager/api/beta/filters + order_by (str): + Optional. Expression to specify sorting + order. See syntax details at + https://developers.google.com/ad-manager/api/beta/filters#order + skip (int): + Optional. Number of individual resources to + skip while paginating. 
+ """ + + parent: str = proto.Field( proto.STRING, - number=9, + number=1, ) - ad_unit_code: str = proto.Field( - proto.STRING, + page_size: int = proto.Field( + proto.INT32, number=2, ) - status: Status = proto.Field( - proto.ENUM, - number=13, - enum=Status, - ) - target_window: "TargetWindowEnum.TargetWindow" = proto.Field( - proto.ENUM, - number=12, - enum="TargetWindowEnum.TargetWindow", - ) - applied_teams: MutableSequence[str] = proto.RepeatedField( + page_token: str = proto.Field( proto.STRING, number=3, ) - teams: MutableSequence[str] = proto.RepeatedField( + filter: str = proto.Field( proto.STRING, number=4, ) - description: str = proto.Field( + order_by: str = proto.Field( proto.STRING, number=5, ) - explicitly_targeted: bool = proto.Field( - proto.BOOL, + skip: int = proto.Field( + proto.INT32, number=6, ) - has_children: bool = proto.Field( - proto.BOOL, - number=7, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=8, - message=timestamp_pb2.Timestamp, - ) - ad_unit_sizes: MutableSequence[ad_unit_size.AdUnitSize] = proto.RepeatedField( - proto.MESSAGE, - number=14, - message=ad_unit_size.AdUnitSize, - ) - external_set_top_box_channel_id: str = proto.Field( - proto.STRING, - number=17, - ) - refresh_delay: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=19, - message=duration_pb2.Duration, - ) - ctv_application_id: int = proto.Field( - proto.INT64, - number=20, - ) - applied_labels: MutableSequence[applied_label.AppliedLabel] = proto.RepeatedField( - proto.MESSAGE, - number=21, - message=applied_label.AppliedLabel, - ) - effective_applied_labels: MutableSequence[ - applied_label.AppliedLabel - ] = proto.RepeatedField( - proto.MESSAGE, - number=22, - message=applied_label.AppliedLabel, - ) - applied_label_frequency_caps: MutableSequence[ - "LabelFrequencyCap" - ] = proto.RepeatedField( - proto.MESSAGE, - number=23, - message="LabelFrequencyCap", - ) - effective_label_frequency_caps: MutableSequence[ - 
"LabelFrequencyCap" - ] = proto.RepeatedField( - proto.MESSAGE, - number=24, - message="LabelFrequencyCap", - ) - smart_size_mode: "SmartSizeModeEnum.SmartSizeMode" = proto.Field( - proto.ENUM, - number=25, - enum="SmartSizeModeEnum.SmartSizeMode", - ) - applied_adsense_enabled: ad_unit_enums.AppliedAdsenseEnabledEnum.AppliedAdsenseEnabled = proto.Field( - proto.ENUM, - number=26, - enum=ad_unit_enums.AppliedAdsenseEnabledEnum.AppliedAdsenseEnabled, - ) - effective_adsense_enabled: bool = proto.Field( - proto.BOOL, - number=27, - ) -class AdUnitParent(proto.Message): - r"""The summary of a parent AdUnit. +class ListAdUnitsResponse(proto.Message): + r"""Response object for ListAdUnitsRequest containing matching + AdUnit resources. Attributes: - parent_ad_unit (str): - Output only. The parent of the current AdUnit Format: - ``networks/{network_code}/adUnits/{ad_unit_id}`` - display_name (str): - Output only. The display name of the parent - AdUnit. - ad_unit_code (str): - Output only. A string used to uniquely - identify the ad unit for the purposes of serving - the ad. - """ - - parent_ad_unit: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - ad_unit_code: str = proto.Field( - proto.STRING, - number=3, - ) - + ad_units (MutableSequence[google.ads.admanager_v1.types.AdUnit]): + The AdUnit from the specified network. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + total_size (int): + Total number of AdUnits. If a filter was included in the + request, this reflects the total number after the filtering + is applied. -class TargetWindowEnum(proto.Message): - r"""Wrapper message for - [TargetWindow][google.ads.admanager.v1.TargetWindowEnum.TargetWindow]. + ``total_size`` will not be calculated in the response unless + it has been included in a response field mask. 
The response + field mask can be provided to the method by using the URL + parameter ``$fields`` or ``fields``, or by using the + HTTP/gRPC header ``X-Goog-FieldMask``. + For more information, see + https://developers.google.com/ad-manager/api/beta/field-masks """ - class TargetWindow(proto.Enum): - r"""Corresponds to an HTML link's target attribute. - See http://www.w3.org/TR/html401/present/frames.html#adef-target - - Values: - TARGET_WINDOW_UNSPECIFIED (0): - Default value. This value is unused. - TOP (1): - Specifies that the link should open in the - full body of the page. - BLANK (2): - Specifies that the link should open in a new - window. - """ - TARGET_WINDOW_UNSPECIFIED = 0 - TOP = 1 - BLANK = 2 - - -class LabelFrequencyCap(proto.Message): - r"""Frequency cap using a label. - - Attributes: - label (str): - The label to used for frequency capping. Format: - "networks/{network_code}/labels/{label_id}". - frequency_cap (google.ads.admanager_v1.types.FrequencyCap): - The frequency cap. - """ + @property + def raw_page(self): + return self - label: str = proto.Field( - proto.STRING, + ad_units: MutableSequence[ad_unit_messages.AdUnit] = proto.RepeatedField( + proto.MESSAGE, number=1, + message=ad_unit_messages.AdUnit, ) - frequency_cap: gaa_frequency_cap.FrequencyCap = proto.Field( - proto.MESSAGE, + next_page_token: str = proto.Field( + proto.STRING, number=2, - message=gaa_frequency_cap.FrequencyCap, ) - - -class SmartSizeModeEnum(proto.Message): - r"""Wrapper message for - [SmartSizeMode][google.ads.admanager.v1.SmartSizeModeEnum.SmartSizeMode]. - - """ - - class SmartSizeMode(proto.Enum): - r"""The smart size mode for this ad unit. This attribute is - optional and defaults to SmartSizeMode.NONE for fixed sizes. - - Values: - SMART_SIZE_MODE_UNSPECIFIED (0): - Default value. This value is unused. - NONE (1): - Fixed size mode (default). - SMART_BANNER (2): - The height is fixed for the request, the - width is a range. 
- DYNAMIC_SIZE (3): - Height and width are ranges. - """ - SMART_SIZE_MODE_UNSPECIFIED = 0 - NONE = 1 - SMART_BANNER = 2 - DYNAMIC_SIZE = 3 - - -class GetAdUnitRequest(proto.Message): - r"""Request object for GetAdUnit method. - - Attributes: - name (str): - Required. The resource name of the AdUnit. Format: - ``networks/{network_code}/adUnits/{ad_unit_id}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, + total_size: int = proto.Field( + proto.INT32, + number=3, ) -class ListAdUnitsRequest(proto.Message): - r"""Request object for ListAdUnits method. +class ListAdUnitSizesRequest(proto.Message): + r"""Request object for ListAdUnitSizes method. Attributes: parent (str): - Required. The parent, which owns this collection of AdUnits. - Format: ``networks/{network_code}`` + Required. The parent, which owns this collection of + AdUnitSizes. Format: ``networks/{network_code}`` page_size (int): - Optional. The maximum number of AdUnits to - return. The service may return fewer than this - value. If unspecified, at most 50 ad units will - be returned. The maximum value is 1000; values - above 1000 will be coerced to 1000. + Optional. The maximum number of AdUnitSizes + to return. The service may return fewer than + this value. If unspecified, at most 50 ad unit + sizes will be returned. The maximum value is + 1000; values above 1000 will be coerced to 1000. page_token (str): Optional. A page token, received from a previous - ``ListAdUnits`` call. Provide this to retrieve the + ``ListAdUnitSizes`` call. Provide this to retrieve the subsequent page. When paginating, all other parameters provided to - ``ListAdUnits`` must match the call that provided the page - token. + ``ListAdUnitSizes`` must match the call that provided the + page token. filter (str): Optional. Expression to filter the response. 
See syntax details at @@ -472,19 +213,19 @@ class ListAdUnitsRequest(proto.Message): ) -class ListAdUnitsResponse(proto.Message): - r"""Response object for ListAdUnitsRequest containing matching - AdUnit resources. +class ListAdUnitSizesResponse(proto.Message): + r"""Response object for ListAdUnitSizesRequest containing + matching AdUnitSizes. Attributes: - ad_units (MutableSequence[google.ads.admanager_v1.types.AdUnit]): - The AdUnit from the specified network. + ad_unit_sizes (MutableSequence[google.ads.admanager_v1.types.AdUnitSize]): + The AdUnitSizes from the specified network. next_page_token (str): A token, which can be sent as ``page_token`` to retrieve the next page. If this field is omitted, there are no subsequent pages. total_size (int): - Total number of AdUnits. If a filter was included in the + Total number of AdUnitSizes. If a filter was included in the request, this reflects the total number after the filtering is applied. @@ -502,10 +243,10 @@ class ListAdUnitsResponse(proto.Message): def raw_page(self): return self - ad_units: MutableSequence["AdUnit"] = proto.RepeatedField( + ad_unit_sizes: MutableSequence[ad_unit_messages.AdUnitSize] = proto.RepeatedField( proto.MESSAGE, number=1, - message="AdUnit", + message=ad_unit_messages.AdUnitSize, ) next_page_token: str = proto.Field( proto.STRING, diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/ad_unit_size.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/ad_unit_size.py deleted file mode 100644 index 8881f72afd02..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/ad_unit_size.py +++ /dev/null @@ -1,67 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.ads.admanager_v1.types import environment_type_enum -from google.ads.admanager_v1.types import size as gaa_size - -__protobuf__ = proto.module( - package="google.ads.admanager.v1", - manifest={ - "AdUnitSize", - }, -) - - -class AdUnitSize(proto.Message): - r"""Represents the size, environment, and companions of an ad in - an ad unit. - - Attributes: - size (google.ads.admanager_v1.types.Size): - Required. The Size of the AdUnit. - environment_type (google.ads.admanager_v1.types.EnvironmentTypeEnum.EnvironmentType): - Required. The EnvironmentType of the AdUnit - companions (MutableSequence[google.ads.admanager_v1.types.Size]): - The companions for this ad unit size. Companions are only - valid if the environment is - [VIDEO_PLAYER][google.ads.admanager.v1.EnvironmentTypeEnum.EnvironmentType]. 
- """ - - size: gaa_size.Size = proto.Field( - proto.MESSAGE, - number=1, - message=gaa_size.Size, - ) - environment_type: environment_type_enum.EnvironmentTypeEnum.EnvironmentType = ( - proto.Field( - proto.ENUM, - number=2, - enum=environment_type_enum.EnvironmentTypeEnum.EnvironmentType, - ) - ) - companions: MutableSequence[gaa_size.Size] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=gaa_size.Size, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/company_messages.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/company_messages.py new file mode 100644 index 000000000000..84cb8ffea435 --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/company_messages.py @@ -0,0 +1,174 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.ads.admanager_v1.types import ( + applied_label, + company_credit_status_enum, + company_type_enum, +) + +__protobuf__ = proto.module( + package="google.ads.admanager.v1", + manifest={ + "Company", + }, +) + + +class Company(proto.Message): + r"""The ``Company`` resource. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Identifier. The resource name of the ``Company``. Format: + ``networks/{network_code}/companies/{company_id}`` + company_id (int): + Output only. ``Company`` ID. + display_name (str): + Required. The display name of the ``Company``. + + This value has a maximum length of 127 characters. + type_ (google.ads.admanager_v1.types.CompanyTypeEnum.CompanyType): + Required. The type of the ``Company``. + address (str): + Optional. The address for the ``Company``. + + This value has a maximum length of 1024 characters. + email (str): + Optional. The email for the ``Company``. + + This value has a maximum length of 128 characters. + fax (str): + Optional. The fax number for the ``Company``. + + This value has a maximum length of 63 characters. + phone (str): + Optional. The phone number for the ``Company``. + + This value has a maximum length of 63 characters. + external_id (str): + Optional. The external ID for the ``Company``. + + This value has a maximum length of 255 characters. + comment (str): + Optional. Comments about the ``Company``. + + This value has a maximum length of 1024 characters. + credit_status (google.ads.admanager_v1.types.CompanyCreditStatusEnum.CompanyCreditStatus): + Optional. The credit status of the ``Company``. + + This attribute defaults to ``ACTIVE`` if basic settings are + enabled and ``ON_HOLD`` if advance settings are enabled. + applied_labels (MutableSequence[google.ads.admanager_v1.types.AppliedLabel]): + Optional. The labels that are directly applied to the + ``Company``. + primary_contact (str): + Optional. The resource names of primary Contact of the + ``Company``. Format: + "networks/{network_code}/contacts/{contact_id}". + + This field is a member of `oneof`_ ``_primary_contact``. + applied_teams (MutableSequence[str]): + Optional. 
The resource names of Teams that are directly + associated with the ``Company``. Format: + "networks/{network_code}/teams/{team_id}". + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the ``Company`` was last modified. + third_party_company_id (int): + Optional. The ID of the Google-recognized canonicalized form + of the ``Company``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + company_id: int = proto.Field( + proto.INT64, + number=2, + ) + display_name: str = proto.Field( + proto.STRING, + number=3, + ) + type_: company_type_enum.CompanyTypeEnum.CompanyType = proto.Field( + proto.ENUM, + number=4, + enum=company_type_enum.CompanyTypeEnum.CompanyType, + ) + address: str = proto.Field( + proto.STRING, + number=5, + ) + email: str = proto.Field( + proto.STRING, + number=6, + ) + fax: str = proto.Field( + proto.STRING, + number=7, + ) + phone: str = proto.Field( + proto.STRING, + number=8, + ) + external_id: str = proto.Field( + proto.STRING, + number=9, + ) + comment: str = proto.Field( + proto.STRING, + number=10, + ) + credit_status: company_credit_status_enum.CompanyCreditStatusEnum.CompanyCreditStatus = proto.Field( + proto.ENUM, + number=11, + enum=company_credit_status_enum.CompanyCreditStatusEnum.CompanyCreditStatus, + ) + applied_labels: MutableSequence[applied_label.AppliedLabel] = proto.RepeatedField( + proto.MESSAGE, + number=12, + message=applied_label.AppliedLabel, + ) + primary_contact: str = proto.Field( + proto.STRING, + number=13, + optional=True, + ) + applied_teams: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=14, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=15, + message=timestamp_pb2.Timestamp, + ) + third_party_company_id: int = proto.Field( + proto.INT64, + number=16, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/company_service.py 
b/packages/google-ads-admanager/google/ads/admanager_v1/types/company_service.py index 204204d7ff34..c75654a1ee37 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/company_service.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/company_service.py @@ -19,16 +19,11 @@ import proto # type: ignore -from google.ads.admanager_v1.types import ( - applied_label, - company_credit_status_enum, - company_type_enum, -) +from google.ads.admanager_v1.types import company_messages __protobuf__ = proto.module( package="google.ads.admanager.v1", manifest={ - "Company", "GetCompanyRequest", "ListCompaniesRequest", "ListCompaniesResponse", @@ -36,129 +31,6 @@ ) -class Company(proto.Message): - r"""The ``Company`` resource. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Identifier. The resource name of the ``Company``. Format: - ``networks/{network_code}/companies/{company_id}`` - company_id (int): - Output only. ``Company`` ID. - display_name (str): - Required. The display name of the ``Company``. - - This value has a maximum length of 127 characters. - type_ (google.ads.admanager_v1.types.CompanyTypeEnum.CompanyType): - Required. The type of the ``Company``. - address (str): - Optional. The address for the ``Company``. - - This value has a maximum length of 1024 characters. - email (str): - Optional. The email for the ``Company``. - - This value has a maximum length of 128 characters. - fax (str): - Optional. The fax number for the ``Company``. - - This value has a maximum length of 63 characters. - phone (str): - Optional. The phone number for the ``Company``. - - This value has a maximum length of 63 characters. - external_id (str): - Optional. The external ID for the ``Company``. - - This value has a maximum length of 255 characters. - comment (str): - Optional. Comments about the ``Company``. 
- - This value has a maximum length of 1024 characters. - credit_status (google.ads.admanager_v1.types.CompanyCreditStatusEnum.CompanyCreditStatus): - Optional. The credit status of this company. - - This attribute defaults to ``ACTIVE`` if basic settings are - enabled and ``ON_HOLD`` if advance settings are enabled. - applied_labels (MutableSequence[google.ads.admanager_v1.types.AppliedLabel]): - Optional. The labels that are directly - applied to this company. - primary_contact (str): - Optional. The resource names of primary Contact of this - company. Format: - "networks/{network_code}/contacts/{contact_id}". - - This field is a member of `oneof`_ ``_primary_contact``. - applied_teams (MutableSequence[str]): - Optional. The resource names of Teams that are directly - associated with this company. Format: - "networks/{network_code}/teams/{team_id}". - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - company_id: int = proto.Field( - proto.INT64, - number=2, - ) - display_name: str = proto.Field( - proto.STRING, - number=3, - ) - type_: company_type_enum.CompanyTypeEnum.CompanyType = proto.Field( - proto.ENUM, - number=4, - enum=company_type_enum.CompanyTypeEnum.CompanyType, - ) - address: str = proto.Field( - proto.STRING, - number=5, - ) - email: str = proto.Field( - proto.STRING, - number=6, - ) - fax: str = proto.Field( - proto.STRING, - number=7, - ) - phone: str = proto.Field( - proto.STRING, - number=8, - ) - external_id: str = proto.Field( - proto.STRING, - number=9, - ) - comment: str = proto.Field( - proto.STRING, - number=10, - ) - credit_status: company_credit_status_enum.CompanyCreditStatusEnum.CompanyCreditStatus = proto.Field( - proto.ENUM, - number=11, - enum=company_credit_status_enum.CompanyCreditStatusEnum.CompanyCreditStatus, - ) - applied_labels: MutableSequence[applied_label.AppliedLabel] = proto.RepeatedField( - proto.MESSAGE, - number=12, - message=applied_label.AppliedLabel, - ) - primary_contact: str = proto.Field( - 
proto.STRING, - number=13, - optional=True, - ) - applied_teams: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=14, - ) - - class GetCompanyRequest(proto.Message): r"""Request object for ``GetCompany`` method. @@ -235,19 +107,19 @@ class ListCompaniesRequest(proto.Message): class ListCompaniesResponse(proto.Message): r"""Response object for ``ListCompaniesRequest`` containing matching - ``Company`` resources. + ``Company`` objects. Attributes: companies (MutableSequence[google.ads.admanager_v1.types.Company]): - The ``Company`` from the specified network. + The ``Company`` objects from the specified network. next_page_token (str): A token, which can be sent as ``page_token`` to retrieve the next page. If this field is omitted, there are no subsequent pages. total_size (int): - Total number of ``Companies``. If a filter was included in - the request, this reflects the total number after the - filtering is applied. + Total number of ``Company`` objects. If a filter was + included in the request, this reflects the total number + after the filtering is applied. ``total_size`` will not be calculated in the response unless it has been included in a response field mask. 
The response @@ -263,10 +135,10 @@ class ListCompaniesResponse(proto.Message): def raw_page(self): return self - companies: MutableSequence["Company"] = proto.RepeatedField( + companies: MutableSequence[company_messages.Company] = proto.RepeatedField( proto.MESSAGE, number=1, - message="Company", + message=company_messages.Company, ) next_page_token: str = proto.Field( proto.STRING, diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/company_type_enum.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/company_type_enum.py index f1fe0af784b0..49f5326ce645 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/company_type_enum.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/company_type_enum.py @@ -55,9 +55,6 @@ class CompanyType(proto.Enum): AD_NETWORK (5): A company representing multiple advertisers and agencies. - VIEWABILITY_PROVIDER (6): - A third-party that measures creative - viewability. """ COMPANY_TYPE_UNSPECIFIED = 0 ADVERTISER = 1 @@ -65,7 +62,6 @@ class CompanyType(proto.Enum): AGENCY = 3 HOUSE_AGENCY = 4 AD_NETWORK = 5 - VIEWABILITY_PROVIDER = 6 __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/computed_status_enum.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/computed_status_enum.py deleted file mode 100644 index 74a09cadb595..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/computed_status_enum.py +++ /dev/null @@ -1,90 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -__protobuf__ = proto.module( - package="google.ads.admanager.v1", - manifest={ - "ComputedStatusEnum", - }, -) - - -class ComputedStatusEnum(proto.Message): - r"""Wrapper message for - [ComputedStatus][google.ads.admanager.v1.ComputedStatusEnum.ComputedStatus]. - - """ - - class ComputedStatus(proto.Enum): - r"""Describes the computed LineItem status that is derived from - the current state of the LineItem. - - Values: - COMPUTED_STATUS_UNSPECIFIED (0): - No value specified. - DELIVERY_EXTENDED (1): - The LineItem has past its link - LineItem#endDateTime with an auto extension, but - hasn't met its goal. - DELIVERING (2): - The LineItem has begun serving. - READY (3): - The LineItem has been activated and is ready - to serve. - PAUSED (4): - The LineItem has been paused from serving. - INACTIVE (5): - The LineItem is inactive. It is either caused - by missing creatives or the network disabling - auto-activation. - PAUSED_INVENTORY_RELEASED (6): - The LineItem has been paused and its reserved - inventory has been released. The LineItem will - not serve. - PENDING_APPROVAL (7): - The LineItem has been submitted for approval. - COMPLETED (8): - The LineItem has completed its run. - DISAPPROVED (9): - The LineItem has been disapproved and is not - eligible to serve. - DRAFT (10): - The LineItem is still being drafted. - CANCELED (11): - The LineItem has been canceled and is no - longer eligible to serve. 
This is a legacy - status imported from Google Ad Manager orders. - """ - COMPUTED_STATUS_UNSPECIFIED = 0 - DELIVERY_EXTENDED = 1 - DELIVERING = 2 - READY = 3 - PAUSED = 4 - INACTIVE = 5 - PAUSED_INVENTORY_RELEASED = 6 - PENDING_APPROVAL = 7 - COMPLETED = 8 - DISAPPROVED = 9 - DRAFT = 10 - CANCELED = 11 - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/contact_messages.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/contact_messages.py new file mode 100644 index 000000000000..dfc3e05dab92 --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/contact_messages.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ads.admanager.v1", + manifest={ + "Contact", + }, +) + + +class Contact(proto.Message): + r"""A contact represents a person who is affiliated with a single + company. A contact can have a variety of contact information + associated to it, and can be invited to view their company's + orders, line items, creatives, and reports. + + Attributes: + name (str): + Identifier. The resource name of the ``Contact``. Format: + ``networks/{network_code}/contacts/{contact_id}`` + contact_id (int): + Output only. 
The unique ID of the contact. + This value is readonly and is assigned by + Google. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + contact_id: int = proto.Field( + proto.INT64, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/contact_service.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/contact_service.py deleted file mode 100644 index a7ee0ea8a74d..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/contact_service.py +++ /dev/null @@ -1,174 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -__protobuf__ = proto.module( - package="google.ads.admanager.v1", - manifest={ - "Contact", - "GetContactRequest", - "ListContactsRequest", - "ListContactsResponse", - }, -) - - -class Contact(proto.Message): - r"""The Contact resource. - - Attributes: - name (str): - Identifier. The resource name of the Contact. Format: - ``networks/{network_code}/contacts/{contact_id}`` - contact_id (int): - Output only. ``Contact`` ID. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - contact_id: int = proto.Field( - proto.INT64, - number=2, - ) - - -class GetContactRequest(proto.Message): - r"""Request object for GetContact method. 
- - Attributes: - name (str): - Required. The resource name of the Contact. Format: - ``networks/{network_code}/contacts/{contact_id}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListContactsRequest(proto.Message): - r"""Request object for ListContacts method. - - Attributes: - parent (str): - Required. The parent, which owns this collection of - Contacts. Format: ``networks/{network_code}`` - page_size (int): - Optional. The maximum number of Contacts to - return. The service may return fewer than this - value. If unspecified, at most 50 contacts will - be returned. The maximum value is 1000; values - above 1000 will be coerced to 1000. - page_token (str): - Optional. A page token, received from a previous - ``ListContacts`` call. Provide this to retrieve the - subsequent page. - - When paginating, all other parameters provided to - ``ListContacts`` must match the call that provided the page - token. - filter (str): - Optional. Expression to filter the response. - See syntax details at - https://developers.google.com/ad-manager/api/beta/filters - order_by (str): - Optional. Expression to specify sorting - order. See syntax details at - https://developers.google.com/ad-manager/api/beta/filters#order - skip (int): - Optional. Number of individual resources to - skip while paginating. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - skip: int = proto.Field( - proto.INT32, - number=6, - ) - - -class ListContactsResponse(proto.Message): - r"""Response object for ListContactsRequest containing matching - Contact resources. - - Attributes: - contacts (MutableSequence[google.ads.admanager_v1.types.Contact]): - The Contact from the specified network. 
- next_page_token (str): - A token, which can be sent as ``page_token`` to retrieve the - next page. If this field is omitted, there are no subsequent - pages. - total_size (int): - Total number of Contacts. If a filter was included in the - request, this reflects the total number after the filtering - is applied. - - ``total_size`` will not be calculated in the response unless - it has been included in a response field mask. The response - field mask can be provided to the method by using the URL - parameter ``$fields`` or ``fields``, or by using the - HTTP/gRPC header ``X-Goog-FieldMask``. - - For more information, see - https://developers.google.com/ad-manager/api/beta/field-masks - """ - - @property - def raw_page(self): - return self - - contacts: MutableSequence["Contact"] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message="Contact", - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - total_size: int = proto.Field( - proto.INT32, - number=3, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/creative_placeholder.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/creative_placeholder.py deleted file mode 100644 index 1b18adecffba..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/creative_placeholder.py +++ /dev/null @@ -1,99 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.ads.admanager_v1.types import applied_label -from google.ads.admanager_v1.types import size as gaa_size - -__protobuf__ = proto.module( - package="google.ads.admanager.v1", - manifest={ - "CreativePlaceholder", - }, -) - - -class CreativePlaceholder(proto.Message): - r"""Describes a slot that a creative is expected to fill. This is - used in forecasting and to validate that the correct creatives - are associated with the line item. - - Attributes: - size (google.ads.admanager_v1.types.Size): - Required. The size that the creative is - expected to have. - companion_sizes (MutableSequence[google.ads.admanager_v1.types.Size]): - The companions that the creative is expected to have. This - attribute can only be set if the line item it belongs to has - an - [EnvironmentType][google.ads.admanager.v1.EnvironmentTypeEnum.EnvironmentType] - of VIDEO_PLAYER or - [roadblocking_type][LineItem.roadblocking_type] of - CREATIVE_SET. - expected_creative_count (int): - Expected number of creatives that will be - uploaded corresponding to this creative - placeholder. This estimate is used to improve - the accuracy of forecasting; for example, if - label frequency capping limits the number of - times a creative may be served. - applied_labels (MutableSequence[google.ads.admanager_v1.types.AppliedLabel]): - Set of labels applied directly to this - CreativePlaceholder. - amp_only (bool): - Indicates if the expected creative of this - placeholder has an AMP only variant. This is - used to improve the accuracy of forecasting and - has no effect on serving. - creative_targeting_display_name (str): - The display name of the creative targeting - that this CreativePlaceholder represents. 
- """ - - size: gaa_size.Size = proto.Field( - proto.MESSAGE, - number=1, - message=gaa_size.Size, - ) - companion_sizes: MutableSequence[gaa_size.Size] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=gaa_size.Size, - ) - expected_creative_count: int = proto.Field( - proto.INT32, - number=3, - ) - applied_labels: MutableSequence[applied_label.AppliedLabel] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message=applied_label.AppliedLabel, - ) - amp_only: bool = proto.Field( - proto.BOOL, - number=5, - ) - creative_targeting_display_name: str = proto.Field( - proto.STRING, - number=6, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/creative_service.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/creative_service.py deleted file mode 100644 index ae4a836d0a24..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/creative_service.py +++ /dev/null @@ -1,229 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -from google.protobuf import timestamp_pb2 # type: ignore -import proto # type: ignore - -from google.ads.admanager_v1.types import ( - ad_partner_declaration as gaa_ad_partner_declaration, -) - -__protobuf__ = proto.module( - package="google.ads.admanager.v1", - manifest={ - "Creative", - "GetCreativeRequest", - "ListCreativesRequest", - "ListCreativesResponse", - }, -) - - -class Creative(proto.Message): - r"""The Creative resource. - - Attributes: - name (str): - Identifier. The resource name of the Creative. Format: - ``networks/{network_code}/creatives/{creative_id}`` - creative_id (int): - Output only. ``Creative`` ID. - display_name (str): - Optional. Display name of the ``Creative``. This attribute - has a maximum length of 255 characters. - advertiser (str): - Required. The resource name of the Company, which is of type - Company.Type.ADVERTISER, to which this Creative belongs. - Format: "networks/{network_code}/companies/{company_id}". - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The instant this Creative was - last modified. - preview_url (str): - Output only. The URL of the creative for - previewing the media. - size_label (str): - Output only. String representations of creative size. This - field is temporarily available and will be deprecated when - ``Creative.size`` becomes available. - ad_partner_declaration (google.ads.admanager_v1.types.AdPartnerDeclaration): - Optional. The Ad Partners associated with - this creative. This is distinct from any - associated companies that Google may detect - programmatically. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - creative_id: int = proto.Field( - proto.INT64, - number=7, - ) - display_name: str = proto.Field( - proto.STRING, - number=8, - ) - advertiser: str = proto.Field( - proto.STRING, - number=2, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - preview_url: str = proto.Field( - proto.STRING, - number=4, - ) - size_label: str = proto.Field( - proto.STRING, - number=9, - ) - ad_partner_declaration: gaa_ad_partner_declaration.AdPartnerDeclaration = ( - proto.Field( - proto.MESSAGE, - number=6, - message=gaa_ad_partner_declaration.AdPartnerDeclaration, - ) - ) - - -class GetCreativeRequest(proto.Message): - r"""Request object for GetCreative method. - - Attributes: - name (str): - Required. The resource name of the Creative. Format: - ``networks/{network_code}/creatives/{creative_id}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListCreativesRequest(proto.Message): - r"""Request object for ListCreatives method. - - Attributes: - parent (str): - Required. The parent, which owns this collection of - Creatives. Format: networks/{network_code} - page_size (int): - Optional. The maximum number of Creatives to - return. The service may return fewer than this - value. If unspecified, at most 50 creatives will - be returned. The maximum value is 1000; values - above 1000 will be coerced to 1000. - page_token (str): - Optional. A page token, received from a previous - ``ListCreatives`` call. Provide this to retrieve the - subsequent page. - - When paginating, all other parameters provided to - ``ListCreatives`` must match the call that provided the page - token. - filter (str): - Optional. Expression to filter the response. - See syntax details at - https://developers.google.com/ad-manager/api/beta/filters - order_by (str): - Optional. Expression to specify sorting - order. 
See syntax details at - https://developers.google.com/ad-manager/api/beta/filters#order - skip (int): - Optional. Number of individual resources to - skip while paginating. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - skip: int = proto.Field( - proto.INT32, - number=6, - ) - - -class ListCreativesResponse(proto.Message): - r"""Response object for ListCreativesRequest containing matching - Creative resources. - - Attributes: - creatives (MutableSequence[google.ads.admanager_v1.types.Creative]): - The Creative from the specified network. - next_page_token (str): - A token, which can be sent as ``page_token`` to retrieve the - next page. If this field is omitted, there are no subsequent - pages. - total_size (int): - Total number of Creatives. If a filter was included in the - request, this reflects the total number after the filtering - is applied. - - ``total_size`` will not be calculated in the response unless - it has been included in a response field mask. The response - field mask can be provided to the method by using the URL - parameter ``$fields`` or ``fields``, or by using the - HTTP/gRPC header ``X-Goog-FieldMask``. 
- - For more information, see - https://developers.google.com/ad-manager/api/beta/field-masks - """ - - @property - def raw_page(self): - return self - - creatives: MutableSequence["Creative"] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message="Creative", - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - total_size: int = proto.Field( - proto.INT32, - number=3, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_field_messages.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_field_messages.py new file mode 100644 index 000000000000..c1e94e44f8a1 --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_field_messages.py @@ -0,0 +1,138 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.ads.admanager_v1.types import custom_field_enums + +__protobuf__ = proto.module( + package="google.ads.admanager.v1", + manifest={ + "CustomField", + "CustomFieldOption", + }, +) + + +class CustomField(proto.Message): + r"""An additional, user-created field on an entity. + + Attributes: + name (str): + Identifier. The resource name of the ``CustomField``. 
+ Format: + ``networks/{network_code}/customFields/{custom_field_id}`` + custom_field_id (int): + Output only. Unique ID of the CustomField. + This value is readonly and is assigned by + Google. + display_name (str): + Required. Name of the CustomField. The max + length is 127 characters. + description (str): + Optional. A description of the custom field. + The maximum length is 511 characters. + status (google.ads.admanager_v1.types.CustomFieldStatusEnum.CustomFieldStatus): + Output only. The status of the ``CustomField``. + entity_type (google.ads.admanager_v1.types.CustomFieldEntityTypeEnum.CustomFieldEntityType): + Required. The type of entity the ``CustomField`` can be + applied to. + data_type (google.ads.admanager_v1.types.CustomFieldDataTypeEnum.CustomFieldDataType): + Required. The data type of the ``CustomField``. + visibility (google.ads.admanager_v1.types.CustomFieldVisibilityEnum.CustomFieldVisibility): + Required. The visibility of the ``CustomField``. + options (MutableSequence[google.ads.admanager_v1.types.CustomFieldOption]): + Optional. The drop-down options for the ``CustomField``. + + Only applicable for ``CustomField`` with the drop-down data + type. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + custom_field_id: int = proto.Field( + proto.INT64, + number=2, + ) + display_name: str = proto.Field( + proto.STRING, + number=3, + ) + description: str = proto.Field( + proto.STRING, + number=4, + ) + status: custom_field_enums.CustomFieldStatusEnum.CustomFieldStatus = proto.Field( + proto.ENUM, + number=5, + enum=custom_field_enums.CustomFieldStatusEnum.CustomFieldStatus, + ) + entity_type: custom_field_enums.CustomFieldEntityTypeEnum.CustomFieldEntityType = ( + proto.Field( + proto.ENUM, + number=7, + enum=custom_field_enums.CustomFieldEntityTypeEnum.CustomFieldEntityType, + ) + ) + data_type: custom_field_enums.CustomFieldDataTypeEnum.CustomFieldDataType = ( + proto.Field( + proto.ENUM, + number=8, + enum=custom_field_enums.CustomFieldDataTypeEnum.CustomFieldDataType, + ) + ) + visibility: custom_field_enums.CustomFieldVisibilityEnum.CustomFieldVisibility = ( + proto.Field( + proto.ENUM, + number=9, + enum=custom_field_enums.CustomFieldVisibilityEnum.CustomFieldVisibility, + ) + ) + options: MutableSequence["CustomFieldOption"] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message="CustomFieldOption", + ) + + +class CustomFieldOption(proto.Message): + r"""An option for a drop-down ``CustomField``. + + Attributes: + custom_field_option_id (int): + Output only. ``CustomFieldOption`` ID. + display_name (str): + Required. The display name of the ``CustomFieldOption``. + + This value has a maximum length of 127 characters. 
+ """ + + custom_field_option_id: int = proto.Field( + proto.INT64, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_field_service.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_field_service.py index 8a31ad64ae6f..7a014e10e456 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_field_service.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_field_service.py @@ -19,13 +19,11 @@ import proto # type: ignore -from google.ads.admanager_v1.types import custom_field_enums +from google.ads.admanager_v1.types import custom_field_messages __protobuf__ = proto.module( package="google.ads.admanager.v1", manifest={ - "CustomField", - "CustomFieldOption", "GetCustomFieldRequest", "ListCustomFieldsRequest", "ListCustomFieldsResponse", @@ -33,111 +31,6 @@ ) -class CustomField(proto.Message): - r"""The ``CustomField`` resource. - - Attributes: - name (str): - Identifier. The resource name of the ``CustomField``. - Format: - ``networks/{network_code}/customFields/{custom_field_id}`` - custom_field_id (int): - Output only. ``CustomField`` ID. - display_name (str): - Required. The display name of the ``CustomField``. - - This value has a maximum length of 127 characters. - description (str): - Optional. The description of the ``CustomField``. - - This value has a maximum length of 511 characters. - status (google.ads.admanager_v1.types.CustomFieldStatusEnum.CustomFieldStatus): - Output only. The status of the ``CustomField``. - entity_type (google.ads.admanager_v1.types.CustomFieldEntityTypeEnum.CustomFieldEntityType): - Required. The type of entity the ``CustomField`` can be - applied to. - data_type (google.ads.admanager_v1.types.CustomFieldDataTypeEnum.CustomFieldDataType): - Required. The data type of the ``CustomField``. 
- visibility (google.ads.admanager_v1.types.CustomFieldVisibilityEnum.CustomFieldVisibility): - Required. The visibility of the ``CustomField``. - options (MutableSequence[google.ads.admanager_v1.types.CustomFieldOption]): - Optional. The drop-down options for the ``CustomField``. - - Only applicable for ``CustomField`` with the drop-down data - type. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - custom_field_id: int = proto.Field( - proto.INT64, - number=2, - ) - display_name: str = proto.Field( - proto.STRING, - number=3, - ) - description: str = proto.Field( - proto.STRING, - number=4, - ) - status: custom_field_enums.CustomFieldStatusEnum.CustomFieldStatus = proto.Field( - proto.ENUM, - number=5, - enum=custom_field_enums.CustomFieldStatusEnum.CustomFieldStatus, - ) - entity_type: custom_field_enums.CustomFieldEntityTypeEnum.CustomFieldEntityType = ( - proto.Field( - proto.ENUM, - number=7, - enum=custom_field_enums.CustomFieldEntityTypeEnum.CustomFieldEntityType, - ) - ) - data_type: custom_field_enums.CustomFieldDataTypeEnum.CustomFieldDataType = ( - proto.Field( - proto.ENUM, - number=8, - enum=custom_field_enums.CustomFieldDataTypeEnum.CustomFieldDataType, - ) - ) - visibility: custom_field_enums.CustomFieldVisibilityEnum.CustomFieldVisibility = ( - proto.Field( - proto.ENUM, - number=9, - enum=custom_field_enums.CustomFieldVisibilityEnum.CustomFieldVisibility, - ) - ) - options: MutableSequence["CustomFieldOption"] = proto.RepeatedField( - proto.MESSAGE, - number=10, - message="CustomFieldOption", - ) - - -class CustomFieldOption(proto.Message): - r"""An option for a drop-down ``CustomField``. - - Attributes: - custom_field_option_id (int): - Output only. ``CustomFieldOption`` ID. - display_name (str): - Required. The display name of the ``CustomFieldOption``. - - This value has a maximum length of 127 characters. 
- """ - - custom_field_option_id: int = proto.Field( - proto.INT64, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - - class GetCustomFieldRequest(proto.Message): r"""Request object for ``GetCustomField`` method. @@ -176,11 +69,12 @@ class ListCustomFieldsRequest(proto.Message): page token. filter (str): Optional. Expression to filter the response. - See syntax details at https://google.aip.dev/160 + See syntax details at + https://developers.google.com/ad-manager/api/beta/filters order_by (str): Optional. Expression to specify sorting order. See syntax details at - https://google.aip.dev/132#ordering + https://developers.google.com/ad-manager/api/beta/filters#order skip (int): Optional. Number of individual resources to skip while paginating. @@ -234,18 +128,20 @@ class ListCustomFieldsResponse(proto.Message): parameter ``$fields`` or ``fields``, or by using the HTTP/gRPC header ``X-Goog-FieldMask``. - For more information, see `System - Parameters `__. 
+ For more information, see + https://developers.google.com/ad-manager/api/beta/field-masks """ @property def raw_page(self): return self - custom_fields: MutableSequence["CustomField"] = proto.RepeatedField( + custom_fields: MutableSequence[ + custom_field_messages.CustomField + ] = proto.RepeatedField( proto.MESSAGE, number=1, - message="CustomField", + message=custom_field_messages.CustomField, ) next_page_token: str = proto.Field( proto.STRING, diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_field_value.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_field_value.py new file mode 100644 index 000000000000..80465b4b9fb2 --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_field_value.py @@ -0,0 +1,114 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ads.admanager.v1", + manifest={ + "CustomFieldValue", + }, +) + + +class CustomFieldValue(proto.Message): + r"""A value for a CustomField on a resource. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + custom_field (str): + Required. The custom field for which this is a value. 
+ Format: + "networks/{network_code}/customFields/{custom_field_id}". + value (google.ads.admanager_v1.types.CustomFieldValue.Value): + Required. A typed value representation of the + value. + + This field is a member of `oneof`_ ``_value``. + """ + + class Value(proto.Message): + r"""Represent custom field value type. + Next Id: 5 + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + dropdown_value (int): + The custom_field_option_id, if the CustomFieldDataType is + DROPDOWN. + + This field is a member of `oneof`_ ``value``. + string_value (str): + The value, if the CustomFieldDataType is + STRING. + + This field is a member of `oneof`_ ``value``. + number_value (float): + The value, if the CustomFieldDataType is + NUMBER. + + This field is a member of `oneof`_ ``value``. + toggle_value (bool): + The value, if the CustomFieldDataType is + TOGGLE. + + This field is a member of `oneof`_ ``value``. 
+ """ + + dropdown_value: int = proto.Field( + proto.INT64, + number=1, + oneof="value", + ) + string_value: str = proto.Field( + proto.STRING, + number=2, + oneof="value", + ) + number_value: float = proto.Field( + proto.DOUBLE, + number=3, + oneof="value", + ) + toggle_value: bool = proto.Field( + proto.BOOL, + number=4, + oneof="value", + ) + + custom_field: str = proto.Field( + proto.STRING, + number=1, + ) + value: Value = proto.Field( + proto.MESSAGE, + number=2, + optional=True, + message=Value, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_targeting_key_messages.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_targeting_key_messages.py new file mode 100644 index 000000000000..db7f1de5e6cc --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_targeting_key_messages.py @@ -0,0 +1,93 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.ads.admanager_v1.types import custom_targeting_key_enums + +__protobuf__ = proto.module( + package="google.ads.admanager.v1", + manifest={ + "CustomTargetingKey", + }, +) + + +class CustomTargetingKey(proto.Message): + r"""The ``CustomTargetingKey`` resource. + + Attributes: + name (str): + Identifier. 
The resource name of the ``CustomTargetingKey``. + Format: + ``networks/{network_code}/customTargetingKeys/{custom_targeting_key_id}`` + custom_targeting_key_id (int): + Output only. ``CustomTargetingKey`` ID. + ad_tag_name (str): + Immutable. Name of the key. Keys can contain up to 10 + characters each. You can use alphanumeric characters and + symbols other than the following: ", ', =, !, +, #, \*, ~, + ;, ^, (, ), <, >, [, ], the white space character. + display_name (str): + Optional. Descriptive name for the ``CustomTargetingKey``. + type_ (google.ads.admanager_v1.types.CustomTargetingKeyTypeEnum.CustomTargetingKeyType): + Required. Indicates whether users will select + from predefined values or create new targeting + values, while specifying targeting criteria for + a line item. + status (google.ads.admanager_v1.types.CustomTargetingKeyStatusEnum.CustomTargetingKeyStatus): + Output only. Status of the ``CustomTargetingKey``. + reportable_type (google.ads.admanager_v1.types.CustomTargetingKeyReportableTypeEnum.CustomTargetingKeyReportableType): + Required. Reportable state of the ``CustomTargetingKey``. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + custom_targeting_key_id: int = proto.Field( + proto.INT64, + number=2, + ) + ad_tag_name: str = proto.Field( + proto.STRING, + number=3, + ) + display_name: str = proto.Field( + proto.STRING, + number=4, + ) + type_: custom_targeting_key_enums.CustomTargetingKeyTypeEnum.CustomTargetingKeyType = proto.Field( + proto.ENUM, + number=5, + enum=custom_targeting_key_enums.CustomTargetingKeyTypeEnum.CustomTargetingKeyType, + ) + status: custom_targeting_key_enums.CustomTargetingKeyStatusEnum.CustomTargetingKeyStatus = proto.Field( + proto.ENUM, + number=6, + enum=custom_targeting_key_enums.CustomTargetingKeyStatusEnum.CustomTargetingKeyStatus, + ) + reportable_type: custom_targeting_key_enums.CustomTargetingKeyReportableTypeEnum.CustomTargetingKeyReportableType = proto.Field( + proto.ENUM, + number=7, + enum=custom_targeting_key_enums.CustomTargetingKeyReportableTypeEnum.CustomTargetingKeyReportableType, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_targeting_key_service.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_targeting_key_service.py index f9a8089d3e0e..f7246e702c32 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_targeting_key_service.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_targeting_key_service.py @@ -19,12 +19,11 @@ import proto # type: ignore -from google.ads.admanager_v1.types import custom_targeting_key_enums +from google.ads.admanager_v1.types import custom_targeting_key_messages __protobuf__ = proto.module( package="google.ads.admanager.v1", manifest={ - "CustomTargetingKey", "GetCustomTargetingKeyRequest", "ListCustomTargetingKeysRequest", "ListCustomTargetingKeysResponse", @@ -32,67 +31,6 @@ ) -class CustomTargetingKey(proto.Message): - r"""The ``CustomTargetingKey`` resource. 
- - Attributes: - name (str): - Identifier. The resource name of the ``CustomTargetingKey``. - Format: - ``networks/{network_code}/customTargetingKeys/{custom_targeting_key_id}`` - custom_targeting_key_id (int): - Output only. ``CustomTargetingKey`` ID. - ad_tag_name (str): - Immutable. Name of the key. Keys can contain up to 10 - characters each. You can use alphanumeric characters and - symbols other than the following: ", ', =, !, +, #, \*, ~, - ;, ^, (, ), <, >, [, ], the white space character. - display_name (str): - Optional. Descriptive name for the ``CustomTargetingKey``. - type_ (google.ads.admanager_v1.types.CustomTargetingKeyTypeEnum.CustomTargetingKeyType): - Required. Indicates whether users will select - from predefined values or create new targeting - values, while specifying targeting criteria for - a line item. - status (google.ads.admanager_v1.types.CustomTargetingKeyStatusEnum.CustomTargetingKeyStatus): - Output only. Status of the ``CustomTargetingKey``. - reportable_type (google.ads.admanager_v1.types.CustomTargetingKeyReportableTypeEnum.CustomTargetingKeyReportableType): - Required. Reportable state of the ``CustomTargetingKey``. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - custom_targeting_key_id: int = proto.Field( - proto.INT64, - number=2, - ) - ad_tag_name: str = proto.Field( - proto.STRING, - number=3, - ) - display_name: str = proto.Field( - proto.STRING, - number=4, - ) - type_: custom_targeting_key_enums.CustomTargetingKeyTypeEnum.CustomTargetingKeyType = proto.Field( - proto.ENUM, - number=5, - enum=custom_targeting_key_enums.CustomTargetingKeyTypeEnum.CustomTargetingKeyType, - ) - status: custom_targeting_key_enums.CustomTargetingKeyStatusEnum.CustomTargetingKeyStatus = proto.Field( - proto.ENUM, - number=6, - enum=custom_targeting_key_enums.CustomTargetingKeyStatusEnum.CustomTargetingKeyStatus, - ) - reportable_type: custom_targeting_key_enums.CustomTargetingKeyReportableTypeEnum.CustomTargetingKeyReportableType = proto.Field( - proto.ENUM, - number=7, - enum=custom_targeting_key_enums.CustomTargetingKeyReportableTypeEnum.CustomTargetingKeyReportableType, - ) - - class GetCustomTargetingKeyRequest(proto.Message): r"""Request object for ``GetCustomTargetingKey`` method. 
@@ -200,10 +138,12 @@ class ListCustomTargetingKeysResponse(proto.Message): def raw_page(self): return self - custom_targeting_keys: MutableSequence["CustomTargetingKey"] = proto.RepeatedField( + custom_targeting_keys: MutableSequence[ + custom_targeting_key_messages.CustomTargetingKey + ] = proto.RepeatedField( proto.MESSAGE, number=1, - message="CustomTargetingKey", + message=custom_targeting_key_messages.CustomTargetingKey, ) next_page_token: str = proto.Field( proto.STRING, diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_targeting_value_messages.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_targeting_value_messages.py new file mode 100644 index 000000000000..de852fc338b4 --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_targeting_value_messages.py @@ -0,0 +1,81 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.ads.admanager_v1.types import custom_targeting_value_enums + +__protobuf__ = proto.module( + package="google.ads.admanager.v1", + manifest={ + "CustomTargetingValue", + }, +) + + +class CustomTargetingValue(proto.Message): + r"""The ``CustomTargetingValue`` resource. + + Attributes: + name (str): + Identifier. The resource name of the + ``CustomTargetingValue``. 
Format: + ``networks/{network_code}/customTargetingKeys/{custom_targeting_key_id}/customTargetingValues/{custom_targeting_value_id}`` + ad_tag_name (str): + Immutable. Name of the ``CustomTargetingValue``. Values can + contain up to 40 characters each. You can use alphanumeric + characters and symbols other than the following: ", ', =, !, + +, #, \*, ~, ;, ^, (, ), <, >, [, ]. Values are not + data-specific; all values are treated as strings. For + example, instead of using "age>=18 AND <=34", try "18-34". + display_name (str): + Optional. Descriptive name for the ``CustomTargetingValue``. + match_type (google.ads.admanager_v1.types.CustomTargetingValueMatchTypeEnum.CustomTargetingValueMatchType): + Required. Immutable. The way in which the + CustomTargetingValue.name strings will be + matched. + status (google.ads.admanager_v1.types.CustomTargetingValueStatusEnum.CustomTargetingValueStatus): + Output only. Status of the ``CustomTargetingValue``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + ad_tag_name: str = proto.Field( + proto.STRING, + number=4, + ) + display_name: str = proto.Field( + proto.STRING, + number=5, + ) + match_type: custom_targeting_value_enums.CustomTargetingValueMatchTypeEnum.CustomTargetingValueMatchType = proto.Field( + proto.ENUM, + number=6, + enum=custom_targeting_value_enums.CustomTargetingValueMatchTypeEnum.CustomTargetingValueMatchType, + ) + status: custom_targeting_value_enums.CustomTargetingValueStatusEnum.CustomTargetingValueStatus = proto.Field( + proto.ENUM, + number=7, + enum=custom_targeting_value_enums.CustomTargetingValueStatusEnum.CustomTargetingValueStatus, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_targeting_value_service.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_targeting_value_service.py index e7d7581572e8..70624af890fc 100644 --- 
a/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_targeting_value_service.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/custom_targeting_value_service.py @@ -19,12 +19,11 @@ import proto # type: ignore -from google.ads.admanager_v1.types import custom_targeting_value_enums +from google.ads.admanager_v1.types import custom_targeting_value_messages __protobuf__ = proto.module( package="google.ads.admanager.v1", manifest={ - "CustomTargetingValue", "GetCustomTargetingValueRequest", "ListCustomTargetingValuesRequest", "ListCustomTargetingValuesResponse", @@ -32,55 +31,6 @@ ) -class CustomTargetingValue(proto.Message): - r"""The ``CustomTargetingValue`` resource. - - Attributes: - name (str): - Identifier. The resource name of the - ``CustomTargetingValue``. Format: - ``networks/{network_code}/customTargetingKeys/{custom_targeting_key_id}/customTargetingValues/{custom_targeting_value_id}`` - ad_tag_name (str): - Immutable. Name of the ``CustomTargetingValue``. Values can - contain up to 40 characters each. You can use alphanumeric - characters and symbols other than the following: ", ', =, !, - +, #, \*, ~, ;, ^, (, ), <, >, [, ]. Values are not - data-specific; all values are treated as string. For - example, instead of using "age>=18 AND <=34", try "18-34". - display_name (str): - Optional. Descriptive name for the ``CustomTargetingValue``. - match_type (google.ads.admanager_v1.types.CustomTargetingValueMatchTypeEnum.CustomTargetingValueMatchType): - Required. The way in which the - CustomTargetingValue.name strings will be - matched. - status (google.ads.admanager_v1.types.CustomTargetingValueStatusEnum.CustomTargetingValueStatus): - Output only. Status of the ``CustomTargetingValue``. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - ad_tag_name: str = proto.Field( - proto.STRING, - number=4, - ) - display_name: str = proto.Field( - proto.STRING, - number=5, - ) - match_type: custom_targeting_value_enums.CustomTargetingValueMatchTypeEnum.CustomTargetingValueMatchType = proto.Field( - proto.ENUM, - number=6, - enum=custom_targeting_value_enums.CustomTargetingValueMatchTypeEnum.CustomTargetingValueMatchType, - ) - status: custom_targeting_value_enums.CustomTargetingValueStatusEnum.CustomTargetingValueStatus = proto.Field( - proto.ENUM, - number=7, - enum=custom_targeting_value_enums.CustomTargetingValueStatusEnum.CustomTargetingValueStatus, - ) - - class GetCustomTargetingValueRequest(proto.Message): r"""Request object for ``GetCustomTargetingValue`` method. @@ -190,11 +140,11 @@ def raw_page(self): return self custom_targeting_values: MutableSequence[ - "CustomTargetingValue" + custom_targeting_value_messages.CustomTargetingValue ] = proto.RepeatedField( proto.MESSAGE, number=1, - message="CustomTargetingValue", + message=custom_targeting_value_messages.CustomTargetingValue, ) next_page_token: str = proto.Field( proto.STRING, diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/entity_signals_mapping_messages.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/entity_signals_mapping_messages.py new file mode 100644 index 000000000000..79fdf386e430 --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/entity_signals_mapping_messages.py @@ -0,0 +1,96 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ads.admanager.v1", + manifest={ + "EntitySignalsMapping", + }, +) + + +class EntitySignalsMapping(proto.Message): + r"""The ``EntitySignalsMapping`` resource. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + audience_segment_id (int): + ID of an AudienceSegment that this mapping + belongs to. + + This field is a member of `oneof`_ ``entity``. + content_bundle_id (int): + ID of a ContentBundle that this mapping + belongs to. + + This field is a member of `oneof`_ ``entity``. + custom_targeting_value_id (int): + ID of a CustomValue that this mapping belongs + to. + + This field is a member of `oneof`_ ``entity``. + name (str): + Identifier. The resource name of the + ``EntitySignalsMapping``. Format: + ``networks/{network_code}/entitySignalsMappings/{entity_signals_mapping_id}`` + entity_signals_mapping_id (int): + Output only. ``EntitySignalsMapping`` ID. + taxonomy_category_ids (MutableSequence[int]): + Required. The IDs of the categories that are + associated with the referencing entity. 
+ """ + + audience_segment_id: int = proto.Field( + proto.INT64, + number=3, + oneof="entity", + ) + content_bundle_id: int = proto.Field( + proto.INT64, + number=4, + oneof="entity", + ) + custom_targeting_value_id: int = proto.Field( + proto.INT64, + number=5, + oneof="entity", + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + entity_signals_mapping_id: int = proto.Field( + proto.INT64, + number=2, + ) + taxonomy_category_ids: MutableSequence[int] = proto.RepeatedField( + proto.INT64, + number=6, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/entity_signals_mapping_service.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/entity_signals_mapping_service.py new file mode 100644 index 000000000000..f63377f55ea1 --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/entity_signals_mapping_service.py @@ -0,0 +1,306 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +import proto # type: ignore + +from google.ads.admanager_v1.types import entity_signals_mapping_messages + +__protobuf__ = proto.module( + package="google.ads.admanager.v1", + manifest={ + "GetEntitySignalsMappingRequest", + "ListEntitySignalsMappingsRequest", + "CreateEntitySignalsMappingRequest", + "UpdateEntitySignalsMappingRequest", + "ListEntitySignalsMappingsResponse", + "BatchCreateEntitySignalsMappingsRequest", + "BatchCreateEntitySignalsMappingsResponse", + "BatchUpdateEntitySignalsMappingsRequest", + "BatchUpdateEntitySignalsMappingsResponse", + }, +) + + +class GetEntitySignalsMappingRequest(proto.Message): + r"""Request object for ``GetEntitySignalsMapping`` method. + + Attributes: + name (str): + Required. The resource name of the EntitySignalsMapping. + Format: + ``networks/{network_code}/entitySignalsMappings/{entity_signals_mapping_id}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListEntitySignalsMappingsRequest(proto.Message): + r"""Request object for ``ListEntitySignalsMappings`` method. + + Attributes: + parent (str): + Required. The parent, which owns this collection of + EntitySignalsMappings. Format: ``networks/{network_code}`` + page_size (int): + Optional. The maximum number of ``EntitySignalsMappings`` to + return. The service may return fewer than this value. If + unspecified, at most 50 ``EntitySignalsMappings`` will be + returned. The maximum value is 1000; values above 1000 will + be coerced to 1000. + page_token (str): + Optional. A page token, received from a previous + ``ListEntitySignalsMappings`` call. Provide this to retrieve + the subsequent page. + + When paginating, all other parameters provided to + ``ListEntitySignalsMappings`` must match the call that + provided the page token. + filter (str): + Optional. 
Expression to filter the response. + See syntax details at + https://developers.google.com/ad-manager/api/beta/filters + order_by (str): + Optional. Expression to specify sorting + order. See syntax details at + https://developers.google.com/ad-manager/api/beta/filters#order + skip (int): + Optional. Number of individual resources to + skip while paginating. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + skip: int = proto.Field( + proto.INT32, + number=6, + ) + + +class CreateEntitySignalsMappingRequest(proto.Message): + r"""Request object for 'CreateEntitySignalsMapping' method. + + Attributes: + parent (str): + Required. The parent resource where this + EntitySignalsMapping will be created. Format: + ``networks/{network_code}`` + entity_signals_mapping (google.ads.admanager_v1.types.EntitySignalsMapping): + Required. The EntitySignalsMapping object to + create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + entity_signals_mapping: entity_signals_mapping_messages.EntitySignalsMapping = ( + proto.Field( + proto.MESSAGE, + number=2, + message=entity_signals_mapping_messages.EntitySignalsMapping, + ) + ) + + +class UpdateEntitySignalsMappingRequest(proto.Message): + r"""Request object for 'UpdateEntitySignalsMapping' method. + + Attributes: + entity_signals_mapping (google.ads.admanager_v1.types.EntitySignalsMapping): + Required. The ``EntitySignalsMapping`` to update. + + The EntitySignalsMapping's name is used to identify the + EntitySignalsMapping to update. Format: + ``networks/{network_code}/entitySignalsMappings/{entity_signals_mapping}`` + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to update. 
+ """ + + entity_signals_mapping: entity_signals_mapping_messages.EntitySignalsMapping = ( + proto.Field( + proto.MESSAGE, + number=1, + message=entity_signals_mapping_messages.EntitySignalsMapping, + ) + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class ListEntitySignalsMappingsResponse(proto.Message): + r"""Response object for ``ListEntitySignalsMappingsRequest`` containing + matching ``EntitySignalsMapping`` resources. + + Attributes: + entity_signals_mappings (MutableSequence[google.ads.admanager_v1.types.EntitySignalsMapping]): + The ``EntitySignalsMapping`` from the specified network. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + total_size (int): + Total number of ``EntitySignalsMappings``. If a filter was + included in the request, this reflects the total number + after the filtering is applied. + + ``total_size`` will not be calculated in the response unless + it has been included in a response field mask. The response + field mask can be provided to the method by using the URL + parameter ``$fields`` or ``fields``, or by using the + HTTP/gRPC header ``X-Goog-FieldMask``. + + For more information, see + https://developers.google.com/ad-manager/api/beta/field-masks + """ + + @property + def raw_page(self): + return self + + entity_signals_mappings: MutableSequence[ + entity_signals_mapping_messages.EntitySignalsMapping + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=entity_signals_mapping_messages.EntitySignalsMapping, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + total_size: int = proto.Field( + proto.INT32, + number=3, + ) + + +class BatchCreateEntitySignalsMappingsRequest(proto.Message): + r"""Request object for ``BatchCreateEntitySignalsMappings`` method. + + Attributes: + parent (str): + Required. 
The parent resource where + ``EntitySignalsMappings`` will be created. Format: + ``networks/{network_code}`` The parent field in the + CreateEntitySignalsMappingRequest must match this field. + requests (MutableSequence[google.ads.admanager_v1.types.CreateEntitySignalsMappingRequest]): + Required. The ``EntitySignalsMapping`` objects to create. A + maximum of 100 objects can be created in a batch. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + requests: MutableSequence[ + "CreateEntitySignalsMappingRequest" + ] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="CreateEntitySignalsMappingRequest", + ) + + +class BatchCreateEntitySignalsMappingsResponse(proto.Message): + r"""Response object for ``BatchCreateEntitySignalsMappings`` method. + + Attributes: + entity_signals_mappings (MutableSequence[google.ads.admanager_v1.types.EntitySignalsMapping]): + The ``EntitySignalsMapping`` objects created. + """ + + entity_signals_mappings: MutableSequence[ + entity_signals_mapping_messages.EntitySignalsMapping + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=entity_signals_mapping_messages.EntitySignalsMapping, + ) + + +class BatchUpdateEntitySignalsMappingsRequest(proto.Message): + r"""Request object for ``BatchUpdateEntitySignalsMappings`` method. + + Attributes: + parent (str): + Required. The parent resource where + ``EntitySignalsMappings`` will be updated. Format: + ``networks/{network_code}`` The parent field in the + UpdateEntitySignalsMappingRequest must match this field. + requests (MutableSequence[google.ads.admanager_v1.types.UpdateEntitySignalsMappingRequest]): + Required. The ``EntitySignalsMapping`` objects to update. A + maximum of 100 objects can be updated in a batch. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + requests: MutableSequence[ + "UpdateEntitySignalsMappingRequest" + ] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="UpdateEntitySignalsMappingRequest", + ) + + +class BatchUpdateEntitySignalsMappingsResponse(proto.Message): + r"""Response object for ``BatchUpdateEntitySignalsMappings`` method. + + Attributes: + entity_signals_mappings (MutableSequence[google.ads.admanager_v1.types.EntitySignalsMapping]): + The ``EntitySignalsMapping`` objects updated. + """ + + entity_signals_mappings: MutableSequence[ + entity_signals_mapping_messages.EntitySignalsMapping + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=entity_signals_mapping_messages.EntitySignalsMapping, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/frequency_cap.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/frequency_cap.py index ecaf3a433ae7..62ff46a14006 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/frequency_cap.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/frequency_cap.py @@ -19,11 +19,12 @@ import proto # type: ignore +from google.ads.admanager_v1.types import time_unit_enum + __protobuf__ = proto.module( package="google.ads.admanager.v1", manifest={ "FrequencyCap", - "TimeUnitEnum", }, ) @@ -60,51 +61,12 @@ class FrequencyCap(proto.Message): number=2, optional=True, ) - time_unit: "TimeUnitEnum.TimeUnit" = proto.Field( + time_unit: time_unit_enum.TimeUnitEnum.TimeUnit = proto.Field( proto.ENUM, number=3, optional=True, - enum="TimeUnitEnum.TimeUnit", + enum=time_unit_enum.TimeUnitEnum.TimeUnit, ) -class TimeUnitEnum(proto.Message): - r"""Wrapper message for TimeUnit.""" - - class TimeUnit(proto.Enum): - r"""Unit of time for the frequency cap. - - Values: - TIME_UNIT_UNSPECIFIED (0): - Default value. This value is unused. 
- MINUTE (1): - Minute - HOUR (2): - Hour - DAY (3): - Day - WEEK (4): - Week - MONTH (5): - Month - LIFETIME (6): - Lifetime - POD (7): - Per pod of ads in a video stream. Only valid for entities in - a VIDEO_PLAYER environment. - STREAM (8): - Per video stream. Only valid for entities in a VIDEO_PLAYER - environment. - """ - TIME_UNIT_UNSPECIFIED = 0 - MINUTE = 1 - HOUR = 2 - DAY = 3 - WEEK = 4 - MONTH = 5 - LIFETIME = 6 - POD = 7 - STREAM = 8 - - __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/goal.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/goal.py deleted file mode 100644 index d73e088136fa..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/goal.py +++ /dev/null @@ -1,204 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -__protobuf__ = proto.module( - package="google.ads.admanager.v1", - manifest={ - "Goal", - "GoalTypeEnum", - "UnitTypeEnum", - }, -) - - -class Goal(proto.Message): - r"""Defines the criteria a [LineItem][google.ads.admanager.v1.LineItem] - needs to satisfy to meet its delivery goal. - - Attributes: - goal_type (google.ads.admanager_v1.types.GoalTypeEnum.GoalType): - The type of the goal for the LineItem. 
It - defines the period over which the goal should be - reached. - unit_type (google.ads.admanager_v1.types.UnitTypeEnum.UnitType): - The type of the goal unit for the LineItem. - units (int): - If this is a primary goal, it represents the number or - percentage of impressions or clicks that will be reserved. - If the line item is of type - [LineItemTypeEnum.LineItemType.SPONSORSHIP][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.SPONSORSHIP], - it represents the percentage of available impressions - reserved. If the line item is of type - [LineItemTypeEnum.LineItemType.BULK][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.BULK] - or - [LineItemTypeEnum.LineItemType.PRICE_PRIORITY][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.PRICE_PRIORITY], - it represents the number of remaining impressions reserved. - If the line item is of type - [LineItemTypeEnum.LineItemType.NETWORK][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.NETWORK] - or - [LineItemTypeEnum.LineItemType.HOUSE][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.HOUSE], - it represents the percentage of remaining impressions - reserved. If this is an impression cap goal, it represents - the number of impressions or conversions that the line item - will stop serving at if reached. For valid line item types, - see [LineItem.impressions_cap][]. - """ - - goal_type: "GoalTypeEnum.GoalType" = proto.Field( - proto.ENUM, - number=1, - enum="GoalTypeEnum.GoalType", - ) - unit_type: "UnitTypeEnum.UnitType" = proto.Field( - proto.ENUM, - number=2, - enum="UnitTypeEnum.UnitType", - ) - units: int = proto.Field( - proto.INT64, - number=3, - ) - - -class GoalTypeEnum(proto.Message): - r"""Wrapper message for - [GoalType][google.ads.admanager.v1.GoalTypeEnum.GoalType]. - - """ - - class GoalType(proto.Enum): - r"""Specifies the type of the goal for a LineItem. - - Values: - GOAL_TYPE_UNSPECIFIED (0): - Default value. This value is unused. 
- NONE (1): - No goal is specified for the number of ads delivered. The - line item - [type][google.ads.admanager.v1.LineItem.line_item_type] must - be one of: - - - [LineItemTypeEnum.LineItemType.PRICE_PRIORITY][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.PRICE_PRIORITY] - - [LineItemTypeEnum.LineItemType.AD_EXCHANGE][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.AD_EXCHANGE] - - [LineItemTypeEnum.LineItemType.CLICK_TRACKING][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.CLICK_TRACKING] - LIFETIME (2): - There is a goal on the number of ads delivered for this line - item during its entire lifetime. The line item - [type][google.ads.admanager.v1.LineItem.line_item_type] must - be one of: - - - [LineItemTypeEnum.LineItemType.STANDARD][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.STANDARD] - - [LineItemTypeEnum.LineItemType.BULK][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.BULK] - - [LineItemTypeEnum.LineItemType.PRICE_PRIORITY][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.PRICE_PRIORITY] - - [LineItemTypeEnum.LineItemType.ADSENSE][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.ADSENSE] - - [LineItemTypeEnum.LineItemType.AD_EXCHANGE][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.AD_EXCHANGE] - - [LineItemTypeEnum.LineItemType.ADMOB][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.ADMOB] - - [LineItemTypeEnum.LineItemType.CLICK_TRACKING][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.CLICK_TRACKING] - DAILY (3): - There is a daily goal on the number of ads delivered for - this line item. 
The line item - [type][google.ads.admanager.v1.LineItem.line_item_type] must - be one of: - - - [LineItemTypeEnum.LineItemType.SPONSORSHIP][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.SPONSORSHIP] - - [LineItemTypeEnum.LineItemType.NETWORK][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.NETWORK] - - [LineItemTypeEnum.LineItemType.PRICE_PRIORITY][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.PRICE_PRIORITY] - - [LineItemTypeEnum.LineItemType.HOUSE][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.HOUSE] - - [LineItemTypeEnum.LineItemType.ADSENSE][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.ADSENSE] - - [LineItemTypeEnum.LineItemType.AD_EXCHANGE][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.AD_EXCHANGE] - - [LineItemTypeEnum.LineItemType.ADMOB][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.ADMOB] - - [LineItemTypeEnum.LineItemType.BUMPER][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.BUMPER] - """ - GOAL_TYPE_UNSPECIFIED = 0 - NONE = 1 - LIFETIME = 2 - DAILY = 3 - - -class UnitTypeEnum(proto.Message): - r"""Wrapper message for - [UnitType][google.ads.admanager.v1.UnitTypeEnum.UnitType]. - - """ - - class UnitType(proto.Enum): - r"""Indicates the type of unit used for defining a reservation. The - [LineItem.cost_type][google.ads.admanager.v1.LineItem.cost_type] can - differ from the UnitType - an ad can have an impression goal, but be - billed by its click. Usually CostType and UnitType will refer to the - same unit. - - Values: - UNIT_TYPE_UNSPECIFIED (0): - Default value. This value is unused. - IMPRESSIONS (1): - The number of impressions served by creatives - associated with the line item. - CLICKS (2): - The number of clicks reported by creatives associated with - the line item. 
The line item - [type][google.ads.admanager.v1.LineItem.line_item_type] must - be one of: - - - [LineItemTypeEnum.LineItemType.STANDARD][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.STANDARD] - - [LineItemTypeEnum.LineItemType.BULK][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.BULK] - - [LineItemTypeEnum.LineItemType.PRICE_PRIORITY][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.PRICE_PRIORITY] - CLICK_THROUGH_CPA_CONVERSIONS (3): - The number of view-through Cost-Per-Action (CPA) conversions - from creatives associated with the line item. This is only - supported as secondary goal and the - [LineItem.cost_type][google.ads.admanager.v1.LineItem.cost_type] - must be [CostTypeEnum.CostType.CPA][]. - VIEW_THROUGH_CPA_CONVERSIONS (4): - The number of view-through Cost-Per-Action (CPA) conversions - from creatives associated with the line item. This is only - supported as secondary goal and the - [LineItem.cost_type][google.ads.admanager.v1.LineItem.cost_type] - must be [CostTypeEnum.CostType.CPA}. - TOTAL_CPA_CONVERSIONS (5): - The number of total Cost-Per-Action (CPA) conversions from - creatives associated with the line item. This is only - supported as secondary goal and the [LineItem.cost_type} - must be [CostTypeEnum.CostType.CPA}. - VIEWABLE_IMPRESSIONS (6): - The number of viewable impressions reported by creatives - associated with the line item. The - [LineItem.line_item_type][google.ads.admanager.v1.LineItem.line_item_type] - must be - [LineItemTypeEnum.LineItemType.STANDARD][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.STANDARD]. - IN_TARGET_IMPRESSIONS (7): - The number of in-target impressions reported by third party - measurements. The - [LineItem.line_item_type][google.ads.admanager.v1.LineItem.line_item_type] - must be - [LineItemTypeEnum.LineItemType.STANDARD][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.STANDARD]. 
- """ - UNIT_TYPE_UNSPECIFIED = 0 - IMPRESSIONS = 1 - CLICKS = 2 - CLICK_THROUGH_CPA_CONVERSIONS = 3 - VIEW_THROUGH_CPA_CONVERSIONS = 4 - TOTAL_CPA_CONVERSIONS = 5 - VIEWABLE_IMPRESSIONS = 6 - IN_TARGET_IMPRESSIONS = 7 - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/label_messages.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/label_messages.py new file mode 100644 index 000000000000..4f95e140723a --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/label_messages.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ads.admanager.v1", + manifest={ + "Label", + }, +) + + +class Label(proto.Message): + r"""A Label is additional information that can be added to an + entity. + + Attributes: + name (str): + Identifier. The resource name of the ``Label``. 
Format: + ``networks/{network_code}/labels/{label_id}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/label_service.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/label_service.py deleted file mode 100644 index cfb44f629186..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/label_service.py +++ /dev/null @@ -1,168 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -__protobuf__ = proto.module( - package="google.ads.admanager.v1", - manifest={ - "Label", - "GetLabelRequest", - "ListLabelsRequest", - "ListLabelsResponse", - }, -) - - -class Label(proto.Message): - r"""The Label resource. - - Attributes: - name (str): - Identifier. The resource name of the Label. Format: - ``networks/{network_code}/labels/{label_id}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class GetLabelRequest(proto.Message): - r"""Request object for GetLabel method. - - Attributes: - name (str): - Required. The resource name of the Label. 
Format: - ``networks/{network_code}/labels/{label_id}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListLabelsRequest(proto.Message): - r"""Request object for ListLabels method. - - Attributes: - parent (str): - Required. The parent, which owns this collection of Labels. - Format: ``networks/{network_code}`` - page_size (int): - Optional. The maximum number of Labels to - return. The service may return fewer than this - value. If unspecified, at most 50 labels will be - returned. The maximum value is 1000; values - above 1000 will be coerced to 1000. - page_token (str): - Optional. A page token, received from a previous - ``ListLabels`` call. Provide this to retrieve the subsequent - page. - - When paginating, all other parameters provided to - ``ListLabels`` must match the call that provided the page - token. - filter (str): - Optional. Expression to filter the response. - See syntax details at - https://developers.google.com/ad-manager/api/beta/filters - order_by (str): - Optional. Expression to specify sorting - order. See syntax details at - https://developers.google.com/ad-manager/api/beta/filters#order - skip (int): - Optional. Number of individual resources to - skip while paginating. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - skip: int = proto.Field( - proto.INT32, - number=6, - ) - - -class ListLabelsResponse(proto.Message): - r"""Response object for ListLabelsRequest containing matching - Label resources. - - Attributes: - labels (MutableSequence[google.ads.admanager_v1.types.Label]): - The Label from the specified network. - next_page_token (str): - A token, which can be sent as ``page_token`` to retrieve the - next page. 
If this field is omitted, there are no subsequent - pages. - total_size (int): - Total number of Labels. If a filter was included in the - request, this reflects the total number after the filtering - is applied. - - ``total_size`` will not be calculated in the response unless - it has been included in a response field mask. The response - field mask can be provided to the method by using the URL - parameter ``$fields`` or ``fields``, or by using the - HTTP/gRPC header ``X-Goog-FieldMask``. - - For more information, see - https://developers.google.com/ad-manager/api/beta/field-masks - """ - - @property - def raw_page(self): - return self - - labels: MutableSequence["Label"] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message="Label", - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - total_size: int = proto.Field( - proto.INT32, - number=3, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/line_item_enums.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/line_item_enums.py deleted file mode 100644 index 6f993c155a83..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/line_item_enums.py +++ /dev/null @@ -1,314 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -__protobuf__ = proto.module( - package="google.ads.admanager.v1", - manifest={ - "LineItemCostTypeEnum", - "CreativeRotationTypeEnum", - "DeliveryRateTypeEnum", - "LineItemDiscountTypeEnum", - "LineItemTypeEnum", - "ReservationStatusEnum", - }, -) - - -class LineItemCostTypeEnum(proto.Message): - r"""Wrapper message for - [LineItemCostType][google.ads.admanager.v1.LineItemCostTypeEnum.LineItemCostType]. - - """ - - class LineItemCostType(proto.Enum): - r"""Describes the LineItem actions that are billable. - - Values: - LINE_ITEM_COST_TYPE_UNSPECIFIED (0): - Not specified value. - CPA (1): - Cost per action. The line item - [type][google.ads.admanager.v1.LineItem.line_item_type] must - be one of: - - - [LineItemTypeEnum.LineItemType.SPONSORSHIP][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.SPONSORSHIP] - - [LineItemTypeEnum.LineItemType.STANDARD][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.STANDARD] - - [LineItemTypeEnum.LineItemType.BULK][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.BULK] - - [LineItemTypeEnum.LineItemType.NETWORK][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.NETWORK] - CPC (2): - Cost per click. 
The line item - [type][google.ads.admanager.v1.LineItem.line_item_type] must - be one of: - - - [LineItemTypeEnum.LineItemType.SPONSORSHIP][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.SPONSORSHIP] - - [LineItemTypeEnum.LineItemType.STANDARD][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.STANDARD] - - [LineItemTypeEnum.LineItemType.BULK][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.BULK] - - [LineItemTypeEnum.LineItemType.NETWORK][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.NETWORK] - - [LineItemTypeEnum.LineItemType.PRICE_PRIORITY][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.PRICE_PRIORITY] - - [LineItemTypeEnum.LineItemType.HOUSE][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.HOUSE] - CPD (3): - Cost per day. The line item - [type][google.ads.admanager.v1.LineItem.line_item_type] must - be one of: - - - [LineItemTypeEnum.LineItemType.SPONSORSHIP][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.SPONSORSHIP] - - [LineItemTypeEnum.LineItemType.NETWORK][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.NETWORK] - CPM (4): - Cost per mille (thousand) impressions. The line item - [type][google.ads.admanager.v1.LineItem.line_item_type] must - be one of: - - - [LineItemTypeEnum.LineItemType.SPONSORSHIP][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.SPONSORSHIP] - - [LineItemTypeEnum.LineItemType.STANDARD][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.STANDARD] - - [LineItemTypeEnum.LineItemType.BULK][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.BULK] - - [LineItemTypeEnum.LineItemType.NETWORK][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.NETWORK] - - [LineItemTypeEnum.LineItemType.PRICE_PRIORITY][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.PRICE_PRIORITY] - - [LineItemTypeEnum.LineItemType.HOUSE][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.HOUSE] - VCPM (5): - Cost per mille (thousand) Active View viewable impressions. 
- The line item - [type][google.ads.admanager.v1.LineItem.line_item_type] must - be one of: - - - [LineItemTypeEnum.LineItemType.STANDARD][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.STANDARD] - CPM_IN_TARGET (6): - Cost per millie (thousand) in-target impressions. The line - item [type][google.ads.admanager.v1.LineItem.line_item_type] - must be one of: - - - [LineItemTypeEnum.LineItemType.STANDARD][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.STANDARD] - CPF (7): - Cost for the entire flight of the deal. The line item - [type][google.ads.admanager.v1.LineItem.line_item_type] must - be must be one of: - - - [LineItemTypeEnum.LineItemType.SPONSORSHIP][google.ads.admanager.v1.LineItemTypeEnum.LineItemType.SPONSORSHIP] - """ - LINE_ITEM_COST_TYPE_UNSPECIFIED = 0 - CPA = 1 - CPC = 2 - CPD = 3 - CPM = 4 - VCPM = 5 - CPM_IN_TARGET = 6 - CPF = 7 - - -class CreativeRotationTypeEnum(proto.Message): - r"""Wrapper message for - [CreativeRotationType][google.ads.admanager.v1.CreativeRotationTypeEnum.CreativeRotationType]. - - """ - - class CreativeRotationType(proto.Enum): - r"""The strategy to use for displaying multiple - [creatives][google.ads.admanager.v1.Creative] that are associated - with a line item. - - Values: - CREATIVE_ROTATION_TYPE_UNSPECIFIED (0): - Not specified value - EVENLY (1): - Creatives are displayed approximately the - same number of times over the duration of the - line item. - OPTIMIZED (2): - Creatives are served approximately - proportionally to their performance. - WEIGHTED (3): - Creatives are served approximately proportionally to their - weights, set on the ``LineItemCreativeAssociation``. - SEQUENTIAL (4): - Creatives are served exactly in sequential order, aka - Storyboarding. Set on the ``LineItemCreativeAssociation``. 
- """ - CREATIVE_ROTATION_TYPE_UNSPECIFIED = 0 - EVENLY = 1 - OPTIMIZED = 2 - WEIGHTED = 3 - SEQUENTIAL = 4 - - -class DeliveryRateTypeEnum(proto.Message): - r"""Wrapper message for - [DeliveryRateType][google.ads.admanager.v1.DeliveryRateTypeEnum.DeliveryRateType]. - - """ - - class DeliveryRateType(proto.Enum): - r"""Possible delivery rates for a line item. It dictates the - manner in which the line item is served. - - Values: - DELIVERY_RATE_TYPE_UNSPECIFIED (0): - Not specified value - EVENLY (1): - Line items are served as evenly as possible across the - number of days specified in a line item's - [duration][LineItem.duration]. - FRONTLOADED (2): - Line items are served more aggressively in - the beginning of the flight date. - AS_FAST_AS_POSSIBLE (3): - The booked impressions may delivered well before the - [end_time][google.ads.admanager.v1.LineItem.end_time]. Other - lower-priority or lower-value line items will be stopped - from delivering until the line item meets the number of - impressions or clicks it is booked for. - """ - DELIVERY_RATE_TYPE_UNSPECIFIED = 0 - EVENLY = 1 - FRONTLOADED = 2 - AS_FAST_AS_POSSIBLE = 3 - - -class LineItemDiscountTypeEnum(proto.Message): - r"""Wrapper message for - [LineItemDiscountType][google.ads.admanager.v1.LineItemDiscountTypeEnum.LineItemDiscountType]. - - """ - - class LineItemDiscountType(proto.Enum): - r"""Describes the possible discount types on the cost of booking - a line item. - - Values: - LINE_ITEM_DISCOUNT_TYPE_UNSPECIFIED (0): - No value specified - ABSOLUTE_VALUE (1): - An absolute value will be discounted from the - line item's cost. - PERCENTAGE (2): - A percentage of the cost will be discounted - for booking the line item. - """ - LINE_ITEM_DISCOUNT_TYPE_UNSPECIFIED = 0 - ABSOLUTE_VALUE = 1 - PERCENTAGE = 2 - - -class LineItemTypeEnum(proto.Message): - r"""Wrapper message for - [LineItemType][google.ads.admanager.v1.LineItemTypeEnum.LineItemType]. 
- - """ - - class LineItemType(proto.Enum): - r"""Indicates the priority of a LineItem, determined by the way - in which impressions are reserved to be served for it. - - Values: - LINE_ITEM_TYPE_UNSPECIFIED (0): - Not specified value. - SPONSORSHIP (12): - The type of LineItem for which a percentage - of all the impressions that are being sold are - reserved. - STANDARD (13): - The type of LineItem for which a fixed - quantity of impressions or clicks are reserved. - NETWORK (9): - The type of LineItem most commonly used to - fill a site's unsold inventory if not - contractually obligated to deliver a requested - number of impressions. Uses daily percentage of - unsold impressions or clicks. - BULK (4): - The type of LineItem for which a fixed - quantity of impressions or clicks will be - delivered at a priority lower than the STANDARD - type. - PRICE_PRIORITY (11): - The type of LineItem most commonly used to - fill a site's unsold inventory if not - contractually obligated to deliver a requested - number of impressions. Uses fixed quantity - percentage of unsold impressions or clicks. - HOUSE (7): - The type of LineItem typically used for ads - that promote products and services chosen by the - publisher. - LEGACY_DFP (8): - Represents a legacy LineItem that has been - migrated from the DFP system. - CLICK_TRACKING (6): - The type of LineItem used for ads that track - ads being served externally of Ad Manager. - ADSENSE (2): - A LineItem using dynamic allocation backed by - AdSense. - AD_EXCHANGE (3): - A LineItem using dynamic allocation backed by - the Google Ad Exchange. - BUMPER (5): - Represents a non-monetizable video LineItem - that targets one or more bumper positions, which - are short house video messages used by - publishers to separate content from ad breaks. - ADMOB (1): - A LineItem using dynamic allocation backed by - AdMob. 
- PREFERRED_DEAL (10): - The type of LineItem for which there are no - impressions reserved, and will serve for a - second price bid. - """ - LINE_ITEM_TYPE_UNSPECIFIED = 0 - SPONSORSHIP = 12 - STANDARD = 13 - NETWORK = 9 - BULK = 4 - PRICE_PRIORITY = 11 - HOUSE = 7 - LEGACY_DFP = 8 - CLICK_TRACKING = 6 - ADSENSE = 2 - AD_EXCHANGE = 3 - BUMPER = 5 - ADMOB = 1 - PREFERRED_DEAL = 10 - - -class ReservationStatusEnum(proto.Message): - r"""Wrapper message for - [ReservationStatus][google.ads.admanager.v1.ReservationStatusEnum.ReservationStatus]. - - """ - - class ReservationStatus(proto.Enum): - r"""Defines the different reservation statuses of a line item. - - Values: - RESERVATION_STATUS_UNSPECIFIED (0): - No value specified - RESERVED (1): - Indicates that inventory has been reserved - for the line item. - UNRESERVED (2): - Indicates that inventory has not been - reserved for the line item. - """ - RESERVATION_STATUS_UNSPECIFIED = 0 - RESERVED = 1 - UNRESERVED = 2 - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/line_item_service.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/line_item_service.py deleted file mode 100644 index 6aaf37448195..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/line_item_service.py +++ /dev/null @@ -1,491 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -from google.protobuf import timestamp_pb2 # type: ignore -from google.type import money_pb2 # type: ignore -import proto # type: ignore - -from google.ads.admanager_v1.types import ( - computed_status_enum, - creative_placeholder, - environment_type_enum, - goal, - line_item_enums, -) - -__protobuf__ = proto.module( - package="google.ads.admanager.v1", - manifest={ - "LineItem", - "GetLineItemRequest", - "ListLineItemsRequest", - "ListLineItemsResponse", - }, -) - - -class LineItem(proto.Message): - r"""The LineItem resource. - - Attributes: - name (str): - Identifier. The resource name of the LineItem. Format: - ``networks/{network_code}/orders/{order_id}/lineItems/{line_item_id}`` - display_name (str): - Optional. Display name of the LineItem. This - attribute has a maximum length of 255 - characters. - archived (bool): - Output only. The archival status of the - LineItem. - contracted_units_bought (int): - Optional. This attribute is only applicable for certain - [line item types][LineItemType] and acts as an "FYI" or - note, which does not impact ad-serving or other backend - systems. - - For [SPONSORSHIP][LineItemType.SPONSORSHIP] line items, this - represents the minimum quantity, which is a lifetime - impression volume goal for reporting purposes. - - For [STANDARD][LineItemType.STANDARD] line items, this - represents the contracted quantity, which is the number of - units specified in the contract that the advertiser has - bought for this line item. This attribute is only available - if you have this feature enabled on your network. - cost_per_unit (google.type.money_pb2.Money): - Required. The amount of money to spend per - impression or click. - cost_type (google.ads.admanager_v1.types.LineItemCostTypeEnum.LineItemCostType): - Required. The method used for billing this - line item. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. 
The instant at which the - LineItem was created. This attribute may be null - for line items created before this feature was - introduced. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The instant at which the - LineItem was last updated - creative_rotation_type (google.ads.admanager_v1.types.CreativeRotationTypeEnum.CreativeRotationType): - Required. The strategy used for displaying multiple - [creatives][google.ads.admanager.v1.Creative] that are - associated with the line item. - delivery_rate_type (google.ads.admanager_v1.types.DeliveryRateTypeEnum.DeliveryRateType): - Non-empty default. The strategy for delivering ads over the - duration of the line item. Defaults to - [EVENLY][DeliveryRateType.EVENLY] or - [FRONTLOADED][DeliveryRatetype.FRONTLOADED] depending on the - network's configuration. - discount (float): - Optional. The number here is either a percentage or an - absolute value depending on the - [discount_type][google.ads.admanager.v1.LineItem.discount_type]. - If it is [PERCENTAGE][LineItemDiscountType.PERCENTAGE], then - only non-fractional values are supported. - discount_type (google.ads.admanager_v1.types.LineItemDiscountTypeEnum.LineItemDiscountType): - Non-empty default. The type of discount applied to the line - item. Defaults to - [PERCENTAGE][LineItemDiscountType.PERCENTAGE]. - environment_type (google.ads.admanager_v1.types.EnvironmentTypeEnum.EnvironmentType): - Non-empty default. The environment that the line item is - targeting. The default value is - [BROWSER][EnvironmentType.BROWSER]. If this value is - [VIDEO_PLAYER][EnvironmentType.VIDEO_PLAYER], then this line - item can only target - [AdUnits][google.ads.admanager.v1.AdUnit] that have - ``AdUnitSizes`` whose ``environment_type`` is also - ``VIDEO_PLAYER``. - external_id (str): - Optional. Identifier for the LineItem that is - meaningful to the publisher. This attribute has - a maximum length of 255 characters. 
- start_time (google.protobuf.timestamp_pb2.Timestamp): - Required. Time at which the LineItem will - begin serving. This attribute must be in the - future when creating a LineItem. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Optional. Time at which the LineItem will stop serving. This - attribute is ignored when - [unlimited_end_time][google.ads.admanager.v1.LineItem.unlimited_end_time] - is ``true``. If specified, it must be after - [start_time][google.ads.admanager.v1.LineItem.start_time]. - This end time does not include - [auto_extension_days][google.ads.admanager.v1.LineItem.auto_extension_days]. - auto_extension_days (int): - Optional. Number of days to allow a LineItem to deliver past - its [end_time][google.ads.admanager.v1.LineItem.end_time]. A - maximum of 7 days is allowed. This feature is only available - for Ad Manager 360 accounts. - unlimited_end_time (bool): - Optional. Whether the LineItem has an - [end_time][google.ads.admanager.v1.LineItem.end_time]. This - attribute can be set to ``true`` for only LineItems with - [line_item_type][google.ads.admanager.v1.LineItem.line_item_type] - [SPONSORSHIP][LineItemType.SPONSORSHIP], - [NETWORK][LineItemType.NETWORK], - [PRICE_PRIORITY][LineItemType.PRICE_PRIORITY] and - [HOUSE][LineItemType.HOUSE]. - last_modified_by_app (str): - Output only. The application that last - modified this line item. - line_item_type (google.ads.admanager_v1.types.LineItemTypeEnum.LineItemType): - Required. Determines the default priority of the LineItem - for delivery. More information can be found on the `Ad - Manager Help - Center `__. - missing_creatives (bool): - Output only. Indicates if a line item is missing any - [creatives][google.ads.admanager.v1.Creative] for the - [creative_placeholders][google.ads.admanager.v1.LineItem.creative_placeholders] - specified. 
- - [Creatives][google.ads.admanager.v1.Creative] can be - considered missing for several reasons: - - - Not enough [creatives][google.ads.admanager.v1.Creative] - of a certain size have been uploaded, as determined by - [expectedCreativeCount][google.ads.admanager.v1.CreativePlaceholder.expected_creative_count]. - For example a line item specifies 750x350, 400x200, but - only a 750x350 was uploaded. Or line item specifies - 750x350 with an expected count of 2, but only one was - uploaded. - - The [appliedLabels][Creative.applied_labels] of an - associated [Creative][google.ads.admanager.v1.Creative] - do not match the - [effectiveAppliedLabels][CreativePlaceholder.effective_applied_labels] - of the line item. For example if a line item specifies - 750x350 with a foo applied label, but a 750x350 creative - without an applied label was uploaded. - notes (str): - Optional. Provides any additional notes that - may annotate LineItem. This field has a maximum - length of 65,535 characters. - priority (int): - Optional. Priority of the LineItem for delivery. Valid - values range from 1 to 16. This field can only be changed by - certain networks, otherwise a ``PERMISSION_DENIED`` error - will occur. - - The following list shows the default, minimum, and maximum - priority values for each [LineItemType][LineItemType]: - formatted as ``LineItemType``: default priority (minimum - priority, maximum priority): - - - ``SPONSORSHIP``: 4 (2,5) - - ``STANDARD``: 8 (6,10) - - ``NETWORK``: 12 (11, 14) - - ``BULK``: 12 (11, 14) - - ``PRICE_PRIORITY``: 12 (11, 14) - - ``HOUSE``: 16 (15, 16) - - ``CLICK_TRACKING``: 16 (1, 16) - - ``AD_EXCHANGE``: 12 (1, 16) - - ``ADSENSE``: 12 (1, 16) - - ``BUMPER``: 16 (15, 16) - reservation_status (google.ads.admanager_v1.types.ReservationStatusEnum.ReservationStatus): - Output only. Describes whether or not - inventory has been reserved for the line item. - web_property_code (str): - Optional. 
The web property code used for dynamic allocation - line items. This web property is only required with line - item types [AD_EXCHANGE][LineItemType.AD_EXCHANGE] and - [ADSENSE][LineItemType.ADSENSE]. - creative_placeholders (MutableSequence[google.ads.admanager_v1.types.CreativePlaceholder]): - Required. Details about the creatives that - are expected to serve through this LineItem. - status (google.ads.admanager_v1.types.ComputedStatusEnum.ComputedStatus): - Output only. The status of the LineItem. - primary_goal (google.ads.admanager_v1.types.Goal): - Required. The primary goal that this LineItem - is associated with, which is used in its pacing - and budgeting. - impression_limit (google.ads.admanager_v1.types.Goal): - Optional. The impression limit for the LineItem. This field - is meaningful only if the - [LineItem.line_item_type][google.ads.admanager.v1.LineItem.line_item_type] - is [LineItemType.SPONSORSHIP][] and - [LineItem.cost_type][google.ads.admanager.v1.LineItem.cost_type] - is [CostType.CPM][]. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - archived: bool = proto.Field( - proto.BOOL, - number=14, - ) - contracted_units_bought: int = proto.Field( - proto.INT64, - number=18, - ) - cost_per_unit: money_pb2.Money = proto.Field( - proto.MESSAGE, - number=15, - message=money_pb2.Money, - ) - cost_type: line_item_enums.LineItemCostTypeEnum.LineItemCostType = proto.Field( - proto.ENUM, - number=19, - enum=line_item_enums.LineItemCostTypeEnum.LineItemCostType, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=12, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=31, - message=timestamp_pb2.Timestamp, - ) - creative_rotation_type: line_item_enums.CreativeRotationTypeEnum.CreativeRotationType = proto.Field( - proto.ENUM, - number=22, - enum=line_item_enums.CreativeRotationTypeEnum.CreativeRotationType, - ) - delivery_rate_type: line_item_enums.DeliveryRateTypeEnum.DeliveryRateType = ( - proto.Field( - proto.ENUM, - number=23, - enum=line_item_enums.DeliveryRateTypeEnum.DeliveryRateType, - ) - ) - discount: float = proto.Field( - proto.DOUBLE, - number=13, - ) - discount_type: line_item_enums.LineItemDiscountTypeEnum.LineItemDiscountType = ( - proto.Field( - proto.ENUM, - number=24, - enum=line_item_enums.LineItemDiscountTypeEnum.LineItemDiscountType, - ) - ) - environment_type: environment_type_enum.EnvironmentTypeEnum.EnvironmentType = ( - proto.Field( - proto.ENUM, - number=25, - enum=environment_type_enum.EnvironmentTypeEnum.EnvironmentType, - ) - ) - external_id: str = proto.Field( - proto.STRING, - number=5, - ) - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=7, - message=timestamp_pb2.Timestamp, - ) - auto_extension_days: 
int = proto.Field( - proto.INT32, - number=8, - ) - unlimited_end_time: bool = proto.Field( - proto.BOOL, - number=9, - ) - last_modified_by_app: str = proto.Field( - proto.STRING, - number=17, - ) - line_item_type: line_item_enums.LineItemTypeEnum.LineItemType = proto.Field( - proto.ENUM, - number=10, - enum=line_item_enums.LineItemTypeEnum.LineItemType, - ) - missing_creatives: bool = proto.Field( - proto.BOOL, - number=16, - ) - notes: str = proto.Field( - proto.STRING, - number=20, - ) - priority: int = proto.Field( - proto.INT64, - number=11, - ) - reservation_status: line_item_enums.ReservationStatusEnum.ReservationStatus = ( - proto.Field( - proto.ENUM, - number=26, - enum=line_item_enums.ReservationStatusEnum.ReservationStatus, - ) - ) - web_property_code: str = proto.Field( - proto.STRING, - number=21, - ) - creative_placeholders: MutableSequence[ - creative_placeholder.CreativePlaceholder - ] = proto.RepeatedField( - proto.MESSAGE, - number=27, - message=creative_placeholder.CreativePlaceholder, - ) - status: computed_status_enum.ComputedStatusEnum.ComputedStatus = proto.Field( - proto.ENUM, - number=28, - enum=computed_status_enum.ComputedStatusEnum.ComputedStatus, - ) - primary_goal: goal.Goal = proto.Field( - proto.MESSAGE, - number=29, - message=goal.Goal, - ) - impression_limit: goal.Goal = proto.Field( - proto.MESSAGE, - number=30, - message=goal.Goal, - ) - - -class GetLineItemRequest(proto.Message): - r"""Request object for GetLineItem method. - - Attributes: - name (str): - Required. The resource name of the LineItem. Format: - ``networks/{network_code}/orders/{order_id}/lineItems/{line_item_id}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListLineItemsRequest(proto.Message): - r"""Request object for ListLineItems method. - - Attributes: - parent (str): - Required. The parent, which owns this collection of - LineItems. Format: networks/{network_code}/orders/{order_id} - page_size (int): - Optional. 
The maximum number of LineItems to - return. The service may return fewer than this - value. If unspecified, at most 50 line items - will be returned. The maximum value is 1000; - values above 1000 will be coerced to 1000. - page_token (str): - Optional. A page token, received from a previous - ``ListLineItems`` call. Provide this to retrieve the - subsequent page. - - When paginating, all other parameters provided to - ``ListLineItems`` must match the call that provided the page - token. - filter (str): - Optional. Expression to filter the response. - See syntax details at - https://developers.google.com/ad-manager/api/beta/filters - order_by (str): - Optional. Expression to specify sorting - order. See syntax details at - https://developers.google.com/ad-manager/api/beta/filters#order - skip (int): - Optional. Number of individual resources to - skip while paginating. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - skip: int = proto.Field( - proto.INT32, - number=6, - ) - - -class ListLineItemsResponse(proto.Message): - r"""Response object for ListLineItemsRequest containing matching - LineItem resources. - - Attributes: - line_items (MutableSequence[google.ads.admanager_v1.types.LineItem]): - The LineItem from the specified network. - next_page_token (str): - A token, which can be sent as ``page_token`` to retrieve the - next page. If this field is omitted, there are no subsequent - pages. - total_size (int): - Total number of LineItems. If a filter was included in the - request, this reflects the total number after the filtering - is applied. - - ``total_size`` will not be calculated in the response unless - it has been included in a response field mask. 
The response - field mask can be provided to the method by using the URL - parameter ``$fields`` or ``fields``, or by using the - HTTP/gRPC header ``X-Goog-FieldMask``. - - For more information, see - https://developers.google.com/ad-manager/api/beta/field-masks - """ - - @property - def raw_page(self): - return self - - line_items: MutableSequence["LineItem"] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message="LineItem", - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - total_size: int = proto.Field( - proto.INT32, - number=3, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/network_messages.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/network_messages.py new file mode 100644 index 000000000000..7c815ca0767b --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/network_messages.py @@ -0,0 +1,106 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ads.admanager.v1", + manifest={ + "Network", + }, +) + + +class Network(proto.Message): + r"""The Network resource. + + Attributes: + name (str): + Identifier. The resource name of the Network. 
Format: + networks/{network_code} + display_name (str): + Optional. Display name for Network. + network_code (str): + Output only. Network Code. + property_code (str): + Output only. Property code. + time_zone (str): + Output only. Time zone associated with the + delivery of orders and reporting. + currency_code (str): + Output only. Primary currency code, in + ISO-4217 format. + secondary_currency_codes (MutableSequence[str]): + Optional. Currency codes that can be used as + an alternative to the primary currency code for + trafficking Line Items. + effective_root_ad_unit (str): + Output only. Top most `Ad + Unit `__ to which descendant + Ad Units can be added. Format: + networks/{network_code}/adUnit/{ad_unit_id} + test_network (bool): + Output only. Whether this is a test network. + network_id (int): + Output only. Network ID. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + network_code: str = proto.Field( + proto.STRING, + number=3, + ) + property_code: str = proto.Field( + proto.STRING, + number=4, + ) + time_zone: str = proto.Field( + proto.STRING, + number=5, + ) + currency_code: str = proto.Field( + proto.STRING, + number=6, + ) + secondary_currency_codes: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=7, + ) + effective_root_ad_unit: str = proto.Field( + proto.STRING, + number=8, + ) + test_network: bool = proto.Field( + proto.BOOL, + number=10, + ) + network_id: int = proto.Field( + proto.INT64, + number=11, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/network_service.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/network_service.py index 8a564ae54919..7ee37dfea726 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/network_service.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/network_service.py @@ 
-19,103 +19,49 @@ import proto # type: ignore +from google.ads.admanager_v1.types import network_messages + __protobuf__ = proto.module( package="google.ads.admanager.v1", manifest={ - "Network", "GetNetworkRequest", + "ListNetworksRequest", + "ListNetworksResponse", }, ) -class Network(proto.Message): - r"""The Network resource. +class GetNetworkRequest(proto.Message): + r"""Request to get Network Attributes: name (str): - Identifier. The resource name of the Network. Format: + Required. Resource name of Network. Format: networks/{network_code} - display_name (str): - Optional. Display name for Network. - network_code (str): - Output only. Network Code. - property_code (str): - Output only. Property code. - time_zone (str): - Output only. Time zone associated with the - delivery of orders and reporting. - currency_code (str): - Output only. Primary currency code, in - ISO-4217 format. - secondary_currency_codes (MutableSequence[str]): - Optional. Currency codes that can be used as - an alternative to the primary currency code for - trafficking Line Items. - effective_root_ad_unit (str): - Output only. Top most `Ad - Unit `__ to which descendant - Ad Units can be added. Format: - networks/{network_code}/adUnit/{ad_unit_id} - test_network (bool): - Output only. Whether this is a test network. - network_id (int): - Output only. Network ID. 
""" name: str = proto.Field( proto.STRING, number=1, ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - network_code: str = proto.Field( - proto.STRING, - number=3, - ) - property_code: str = proto.Field( - proto.STRING, - number=4, - ) - time_zone: str = proto.Field( - proto.STRING, - number=5, - ) - currency_code: str = proto.Field( - proto.STRING, - number=6, - ) - secondary_currency_codes: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=7, - ) - effective_root_ad_unit: str = proto.Field( - proto.STRING, - number=8, - ) - test_network: bool = proto.Field( - proto.BOOL, - number=10, - ) - network_id: int = proto.Field( - proto.INT64, - number=11, - ) -class GetNetworkRequest(proto.Message): - r"""Request to get Network +class ListNetworksRequest(proto.Message): + r"""Request object for ``ListNetworks`` method.""" + + +class ListNetworksResponse(proto.Message): + r"""Response object for ``ListNetworks`` method. Attributes: - name (str): - Required. Resource name of Network. Format: - networks/{network_code} + networks (MutableSequence[google.ads.admanager_v1.types.Network]): + The ``Network``\ s a user has access to. """ - name: str = proto.Field( - proto.STRING, + networks: MutableSequence[network_messages.Network] = proto.RepeatedField( + proto.MESSAGE, number=1, + message=network_messages.Network, ) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/order_enums.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/order_enums.py new file mode 100644 index 000000000000..1ec6647453c1 --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/order_enums.py @@ -0,0 +1,73 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ads.admanager.v1", + manifest={ + "OrderStatusEnum", + }, +) + + +class OrderStatusEnum(proto.Message): + r"""Wrapper message for + [OrderStatus][google.ads.admanager.v1.OrderStatusEnum.OrderStatus]. + + """ + + class OrderStatus(proto.Enum): + r"""The status of an Order. + + Values: + ORDER_STATUS_UNSPECIFIED (0): + Default value. This value is unused. + DRAFT (2): + Indicates that the Order has just been + created but no approval has been requested yet. + PENDING_APPROVAL (3): + Indicates that a request for approval for the + Order has been made. + APPROVED (4): + Indicates that the Order has been approved + and is ready to serve. + DISAPPROVED (5): + Indicates that the Order has been disapproved + and is not eligible to serve. + PAUSED (6): + This is a legacy state. Paused status should + be checked on LineItems within the order. + CANCELED (7): + Indicates that the Order has been canceled + and cannot serve. + DELETED (8): + Indicates that the Order has been deleted. 
+ """ + ORDER_STATUS_UNSPECIFIED = 0 + DRAFT = 2 + PENDING_APPROVAL = 3 + APPROVED = 4 + DISAPPROVED = 5 + PAUSED = 6 + CANCELED = 7 + DELETED = 8 + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/order_messages.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/order_messages.py new file mode 100644 index 000000000000..4e85a8eea818 --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/order_messages.py @@ -0,0 +1,278 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.ads.admanager_v1.types import applied_label, custom_field_value, order_enums + +__protobuf__ = proto.module( + package="google.ads.admanager.v1", + manifest={ + "Order", + }, +) + + +class Order(proto.Message): + r"""The ``Order`` resource. + + Attributes: + name (str): + Identifier. The resource name of the ``Order``. Format: + ``networks/{network_code}/orders/{order_id}`` + order_id (int): + Output only. Order ID. + display_name (str): + Required. The display name of the Order. + This value has a maximum length of 255 + characters. + programmatic (bool): + Optional. Specifies whether or not the Order + is a programmatic order. 
+ trafficker (str): + Required. The resource name of the User responsible for + trafficking the Order. Format: + "networks/{network_code}/users/{user_id}". + advertiser_contacts (MutableSequence[str]): + Optional. The resource names of Contacts from the advertiser + of this Order. Format: + "networks/{network_code}/contacts/{contact_id}". + advertiser (str): + Required. The resource name of the Company, which is of type + Company.Type.ADVERTISER, to which this order belongs. + Format: "networks/{network_code}/companies/{company_id}". + agency_contacts (MutableSequence[str]): + Optional. The resource names of Contacts from the + advertising Agency of this Order. Format: + "networks/{network_code}/contacts/{contact_id}". + agency (str): + Optional. The resource name of the Company, which is of type + Company.Type.AGENCY, with which this order is associated. + Format: "networks/{network_code}/companies/{company_id}". + applied_teams (MutableSequence[str]): + Optional. The resource names of Teams directly applied to + this Order. Format: + "networks/{network_code}/teams/{team_id}". + effective_teams (MutableSequence[str]): + Output only. The resource names of Teams applied to this + Order including inherited values. Format: + "networks/{network_code}/teams/{team_id}". + creator (str): + Output only. The resource name of the User who created the + Order on behalf of the advertiser. This value is assigned by + Google. Format: "networks/{network_code}/users/{user_id}". + currency_code (str): + Output only. The ISO 4217 3-letter currency + code for the currency used by the Order. This + value is the network's currency code. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The instant at which the Order and its + associated line items are eligible to begin serving. This + attribute is derived from the line item of the order that + has the earliest LineItem.start_time. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. 
The instant at which the Order and its + associated line items stop being served. This attribute is + derived from the line item of the order that has the latest + LineItem.end_time. + unlimited_end_time (bool): + Output only. Indicates whether or not this + Order has an end time. + external_order_id (int): + Optional. An arbitrary ID to associate to the + Order, which can be used as a key to an external + system. + archived (bool): + Output only. The archival status of the + Order. + last_modified_by_app (str): + Output only. The application which modified + this order. This attribute is assigned by + Google. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The instant this Order was last + modified. + notes (str): + Optional. Provides any additional notes that + may annotate the Order. This attribute has a + maximum length of 65,535 characters. + po_number (str): + Optional. The purchase order number for the + Order. This value has a maximum length of 63 + characters. + status (google.ads.admanager_v1.types.OrderStatusEnum.OrderStatus): + Output only. The status of the Order. + salesperson (str): + Optional. The resource name of the User responsible for the + sales of the Order. Format: + "networks/{network_code}/users/{user_id}". + secondary_salespeople (MutableSequence[str]): + Optional. Unordered list. The resource names of the + secondary salespeople associated with the order. Format: + "networks/{network_code}/users/{user_id}". + secondary_traffickers (MutableSequence[str]): + Optional. Unordered list. The resource names of the + secondary traffickers associated with the order. Format: + "networks/{network_code}/users/{user_id}". + applied_labels (MutableSequence[google.ads.admanager_v1.types.AppliedLabel]): + Optional. The set of labels applied directly + to this order. + effective_applied_labels (MutableSequence[google.ads.admanager_v1.types.AppliedLabel]): + Output only. 
Contains the set of labels + applied directly to the order as well as those + inherited from the company that owns the order. + If a label has been negated, only the negated + label is returned. This field is assigned by + Google. + custom_field_values (MutableSequence[google.ads.admanager_v1.types.CustomFieldValue]): + Optional. The set of custom field values to + this order. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + order_id: int = proto.Field( + proto.INT64, + number=4, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + programmatic: bool = proto.Field( + proto.BOOL, + number=3, + ) + trafficker: str = proto.Field( + proto.STRING, + number=23, + ) + advertiser_contacts: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + advertiser: str = proto.Field( + proto.STRING, + number=6, + ) + agency_contacts: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=7, + ) + agency: str = proto.Field( + proto.STRING, + number=8, + ) + applied_teams: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=9, + ) + effective_teams: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=28, + ) + creator: str = proto.Field( + proto.STRING, + number=10, + ) + currency_code: str = proto.Field( + proto.STRING, + number=11, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=19, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=12, + message=timestamp_pb2.Timestamp, + ) + unlimited_end_time: bool = proto.Field( + proto.BOOL, + number=45, + ) + external_order_id: int = proto.Field( + proto.INT64, + number=13, + ) + archived: bool = proto.Field( + proto.BOOL, + number=14, + ) + last_modified_by_app: str = proto.Field( + proto.STRING, + number=15, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=16, + message=timestamp_pb2.Timestamp, + ) + 
notes: str = proto.Field( + proto.STRING, + number=17, + ) + po_number: str = proto.Field( + proto.STRING, + number=18, + ) + status: order_enums.OrderStatusEnum.OrderStatus = proto.Field( + proto.ENUM, + number=20, + enum=order_enums.OrderStatusEnum.OrderStatus, + ) + salesperson: str = proto.Field( + proto.STRING, + number=21, + ) + secondary_salespeople: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=22, + ) + secondary_traffickers: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=24, + ) + applied_labels: MutableSequence[applied_label.AppliedLabel] = proto.RepeatedField( + proto.MESSAGE, + number=25, + message=applied_label.AppliedLabel, + ) + effective_applied_labels: MutableSequence[ + applied_label.AppliedLabel + ] = proto.RepeatedField( + proto.MESSAGE, + number=26, + message=applied_label.AppliedLabel, + ) + custom_field_values: MutableSequence[ + custom_field_value.CustomFieldValue + ] = proto.RepeatedField( + proto.MESSAGE, + number=38, + message=custom_field_value.CustomFieldValue, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/order_service.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/order_service.py index 3884d4acf7ed..3b13ef2d8105 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/order_service.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/order_service.py @@ -17,15 +17,13 @@ from typing import MutableMapping, MutableSequence -from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore -from google.ads.admanager_v1.types import applied_label +from google.ads.admanager_v1.types import order_messages __protobuf__ = proto.module( package="google.ads.admanager.v1", manifest={ - "Order", "GetOrderRequest", "ListOrdersRequest", "ListOrdersResponse", @@ -33,271 +31,6 @@ ) -class Order(proto.Message): - r"""The ``Order`` resource. 
- - Attributes: - name (str): - Identifier. The resource name of the ``Order``. Format: - ``networks/{network_code}/orders/{order_id}`` - order_id (int): - Output only. Order ID. - display_name (str): - Required. The display name of the Order. - This value is required to create an order and - has a maximum length of 255 characters. - programmatic (bool): - Optional. Specifies whether or not the Order - is a programmatic order. - trafficker (str): - Required. The resource name of the User responsible for - trafficking the Order. Format: - "networks/{network_code}/users/{user_id}". - advertiser_contacts (MutableSequence[str]): - Optional. The resource names of Contacts from the advertiser - of this Order. Format: - "networks/{network_code}/contacts/{contact_id}". - advertiser (str): - Required. The resource name of the Company, which is of type - Company.Type.ADVERTISER, to which this order belongs. This - attribute is required. Format: - "networks/{network_code}/companies/{company_id}". - agency_contacts (MutableSequence[str]): - Optional. The resource names of Contacts from the - advertising Agency of this Order. Format: - "networks/{network_code}/contacts/{contact_id}". - agency (str): - Optional. The resource name of the Company, which is of type - Company.Type.AGENCY, with which this order is associated. - Format: "networks/{network_code}/companies/{company_id}". - applied_teams (MutableSequence[str]): - Optional. The resource names of Teams directly applied to - this Order. Format: - "networks/{network_code}/teams/{team_id}". - effective_teams (MutableSequence[str]): - Output only. The resource names of Teams applied to this - Order including inherited values. Format: - "networks/{network_code}/teams/{team_id}". - creator (str): - Output only. The resource name of the User who created the - Order on behalf of the advertiser. This value is assigned by - Google. Format: "networks/{network_code}/users/{user_id}". - currency_code (str): - Output only. 
The ISO 4217 3-letter currency - code for the currency used by the Order. This - value is the network's currency code. - start_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The instant at which the Order and its - associated line items are eligible to begin serving. This - attribute is derived from the line item of the order that - has the earliest LineItem.start_time. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The instant at which the Order and its - associated line items stop being served. This attribute is - derived from the line item of the order that has the latest - LineItem.end_time. - external_order_id (int): - Optional. An arbitrary ID to associate to the - Order, which can be used as a key to an external - system. - archived (bool): - Output only. The archival status of the - Order. - last_modified_by_app (str): - Output only. The application which modified - this order. This attribute is assigned by - Google. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The instant this Order was last - modified. - notes (str): - Optional. Provides any additional notes that - may annotate the Order. This attribute has a - maximum length of 65,535 characters. - po_number (str): - Optional. The purchase order number for the - Order. This value has a maximum length of 63 - characters. - status (google.ads.admanager_v1.types.Order.Status): - Output only. The status of the Order. - salesperson (str): - Optional. The resource name of the User responsible for the - sales of the Order. Format: - "networks/{network_code}/users/{user_id}". - secondary_salespeople (MutableSequence[str]): - Optional. The resource names of the secondary salespeople - associated with the order. Format: - "networks/{network_code}/users/{user_id}". - secondary_traffickers (MutableSequence[str]): - Optional. The resource names of the secondary traffickers - associated with the order. Format: - "networks/{network_code}/users/{user_id}". 
- applied_labels (MutableSequence[google.ads.admanager_v1.types.AppliedLabel]): - Optional. The set of labels applied directly - to this order. - effective_applied_labels (MutableSequence[google.ads.admanager_v1.types.AppliedLabel]): - Output only. Contains the set of labels - applied directly to the order as well as those - inherited from the company that owns the order. - If a label has been negated, only the negated - label is returned. This field is assigned by - Google. - """ - - class Status(proto.Enum): - r"""The status of an Order. - - Values: - STATUS_UNSPECIFIED (0): - Default value. This value is unused. - DRAFT (2): - Indicates that the Order has just been - created but no approval has been requested yet. - PENDING_APPROVAL (3): - Indicates that a request for approval for the - Order has been made. - APPROVED (4): - Indicates that the Order has been approved - and is ready to serve. - DISAPPROVED (5): - Indicates that the Order has been disapproved - and is not eligible to serve. - PAUSED (6): - This is a legacy state. Paused status should - be checked on LineItems within the order. - CANCELED (7): - Indicates that the Order has been canceled - and cannot serve. - DELETED (8): - Indicates that the Order has been deleted. 
- """ - STATUS_UNSPECIFIED = 0 - DRAFT = 2 - PENDING_APPROVAL = 3 - APPROVED = 4 - DISAPPROVED = 5 - PAUSED = 6 - CANCELED = 7 - DELETED = 8 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - order_id: int = proto.Field( - proto.INT64, - number=4, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - programmatic: bool = proto.Field( - proto.BOOL, - number=3, - ) - trafficker: str = proto.Field( - proto.STRING, - number=23, - ) - advertiser_contacts: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=5, - ) - advertiser: str = proto.Field( - proto.STRING, - number=6, - ) - agency_contacts: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=7, - ) - agency: str = proto.Field( - proto.STRING, - number=8, - ) - applied_teams: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=9, - ) - effective_teams: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=28, - ) - creator: str = proto.Field( - proto.STRING, - number=10, - ) - currency_code: str = proto.Field( - proto.STRING, - number=11, - ) - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=19, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=12, - message=timestamp_pb2.Timestamp, - ) - external_order_id: int = proto.Field( - proto.INT64, - number=13, - ) - archived: bool = proto.Field( - proto.BOOL, - number=14, - ) - last_modified_by_app: str = proto.Field( - proto.STRING, - number=15, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=16, - message=timestamp_pb2.Timestamp, - ) - notes: str = proto.Field( - proto.STRING, - number=17, - ) - po_number: str = proto.Field( - proto.STRING, - number=18, - ) - status: Status = proto.Field( - proto.ENUM, - number=20, - enum=Status, - ) - salesperson: str = proto.Field( - proto.STRING, - number=21, - ) - secondary_salespeople: MutableSequence[str] = 
proto.RepeatedField( - proto.STRING, - number=22, - ) - secondary_traffickers: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=24, - ) - applied_labels: MutableSequence[applied_label.AppliedLabel] = proto.RepeatedField( - proto.MESSAGE, - number=25, - message=applied_label.AppliedLabel, - ) - effective_applied_labels: MutableSequence[ - applied_label.AppliedLabel - ] = proto.RepeatedField( - proto.MESSAGE, - number=26, - message=applied_label.AppliedLabel, - ) - - class GetOrderRequest(proto.Message): r"""Request object for ``GetOrder`` method. @@ -402,10 +135,10 @@ class ListOrdersResponse(proto.Message): def raw_page(self): return self - orders: MutableSequence["Order"] = proto.RepeatedField( + orders: MutableSequence[order_messages.Order] = proto.RepeatedField( proto.MESSAGE, number=1, - message="Order", + message=order_messages.Order, ) next_page_token: str = proto.Field( proto.STRING, diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/placement_messages.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/placement_messages.py new file mode 100644 index 000000000000..5705c4acda2f --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/placement_messages.py @@ -0,0 +1,102 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.ads.admanager_v1.types import placement_enums + +__protobuf__ = proto.module( + package="google.ads.admanager.v1", + manifest={ + "Placement", + }, +) + + +class Placement(proto.Message): + r"""The ``Placement`` resource. + + Attributes: + name (str): + Identifier. The resource name of the ``Placement``. Format: + ``networks/{network_code}/placements/{placement_id}`` + placement_id (int): + Output only. ``Placement`` ID. + display_name (str): + Required. The display name of the placement. + Its maximum length is 255 characters. + description (str): + Optional. A description of the Placement. + This value is optional and its maximum length is + 65,535 characters. + placement_code (str): + Output only. A string used to uniquely + identify the Placement for purposes of serving + the ad. This attribute is read-only and is + assigned by Google when a placement is created. + status (google.ads.admanager_v1.types.PlacementStatusEnum.PlacementStatus): + Output only. The status of the Placement. + This attribute is read-only. + targeted_ad_units (MutableSequence[str]): + Optional. The resource names of AdUnits that constitute the + Placement. Format: + "networks/{network_code}/adUnits/{ad_unit_id}". + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The instant this Placement was + last modified. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + placement_id: int = proto.Field( + proto.INT64, + number=2, + ) + display_name: str = proto.Field( + proto.STRING, + number=3, + ) + description: str = proto.Field( + proto.STRING, + number=4, + ) + placement_code: str = proto.Field( + proto.STRING, + number=5, + ) + status: placement_enums.PlacementStatusEnum.PlacementStatus = proto.Field( + proto.ENUM, + number=6, + enum=placement_enums.PlacementStatusEnum.PlacementStatus, + ) + targeted_ad_units: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=7, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/placement_service.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/placement_service.py index 5221b8ce4a6f..6d313e514801 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/placement_service.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/placement_service.py @@ -17,15 +17,13 @@ from typing import MutableMapping, MutableSequence -from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore -from google.ads.admanager_v1.types import placement_enums +from google.ads.admanager_v1.types import placement_messages __protobuf__ = proto.module( package="google.ads.admanager.v1", manifest={ - "Placement", "GetPlacementRequest", "ListPlacementsRequest", "ListPlacementsResponse", @@ -33,75 +31,6 @@ ) -class Placement(proto.Message): - r"""The ``Placement`` resource. - - Attributes: - name (str): - Identifier. The resource name of the ``Placement``. Format: - ``networks/{network_code}/placements/{placement_id}`` - placement_id (int): - Output only. ``Placement`` ID. - display_name (str): - Required. The display name of the placement. 
- Its maximum length is 255 characters. - description (str): - Optional. A description of the Placement. - This value is optional and its maximum length is - 65,535 characters. - placement_code (str): - Output only. A string used to uniquely - identify the Placement for purposes of serving - the ad. This attribute is read-only and is - assigned by Google when a placement is created. - status (google.ads.admanager_v1.types.PlacementStatusEnum.PlacementStatus): - Output only. The status of the Placement. - This attribute is read-only. - targeted_ad_units (MutableSequence[str]): - Optional. The resource names of AdUnits that constitute the - Placement. Format: - "networks/{network_code}/adUnits/{ad_unit_id}". - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The instant this Placement was - last modified. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - placement_id: int = proto.Field( - proto.INT64, - number=2, - ) - display_name: str = proto.Field( - proto.STRING, - number=3, - ) - description: str = proto.Field( - proto.STRING, - number=4, - ) - placement_code: str = proto.Field( - proto.STRING, - number=5, - ) - status: placement_enums.PlacementStatusEnum.PlacementStatus = proto.Field( - proto.ENUM, - number=6, - enum=placement_enums.PlacementStatusEnum.PlacementStatus, - ) - targeted_ad_units: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=7, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=9, - message=timestamp_pb2.Timestamp, - ) - - class GetPlacementRequest(proto.Message): r"""Request object for ``GetPlacement`` method. 
@@ -207,10 +136,10 @@ class ListPlacementsResponse(proto.Message): def raw_page(self): return self - placements: MutableSequence["Placement"] = proto.RepeatedField( + placements: MutableSequence[placement_messages.Placement] = proto.RepeatedField( proto.MESSAGE, number=1, - message="Placement", + message=placement_messages.Placement, ) next_page_token: str = proto.Field( proto.STRING, diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/report_service.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/report_service.py index f479273afbb6..9cbecb1b5556 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/report_service.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/report_service.py @@ -17,143 +17,4654 @@ from typing import MutableMapping, MutableSequence +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import date_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore import proto # type: ignore __protobuf__ = proto.module( package="google.ads.admanager.v1", manifest={ "Report", - "ExportSavedReportRequest", - "ExportSavedReportMetadata", - "ExportSavedReportResponse", + "RunReportRequest", + "RunReportMetadata", + "RunReportResponse", + "GetReportRequest", + "ListReportsRequest", + "ListReportsResponse", + "CreateReportRequest", + "UpdateReportRequest", + "FetchReportResultRowsRequest", + "FetchReportResultRowsResponse", + "ReportDefinition", + "ScheduleOptions", + "Schedule", }, ) class Report(proto.Message): - r"""The Report resource. + r"""The ``Report`` resource. Attributes: name (str): - Identifier. The resource name of the Report. Report resource + Identifier. The resource name of the report. Report resource name have the form: ``networks/{network_code}/reports/{report_id}`` + report_id (int): + Output only. Report ID. 
+ visibility (google.ads.admanager_v1.types.Report.Visibility): + Optional. The visibility of a report. + report_definition (google.ads.admanager_v1.types.ReportDefinition): + Required. The report definition of the + report. + display_name (str): + Optional. Display name for the report. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The instant this report was last + modified. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The instant this report was + created. + locale (str): + Output only. The locale of this report. + Locale is set from the user's locale at the time + of the request. Locale can not be modified. + schedule_options (google.ads.admanager_v1.types.ScheduleOptions): + Optional. The schedule options of this + report. """ + class TimePeriodColumn(proto.Enum): + r"""Valid time period columns. + + Values: + TIME_PERIOD_COLUMN_UNSPECIFIED (0): + Default value. Report will have no time + period column. + TIME_PERIOD_COLUMN_DATE (1): + A column for each date in the report. + TIME_PERIOD_COLUMN_WEEK (2): + A column for each week in the report. + TIME_PERIOD_COLUMN_MONTH (3): + A column for each month in the report. + TIME_PERIOD_COLUMN_QUARTER (4): + A column for each quarter in the report. + """ + TIME_PERIOD_COLUMN_UNSPECIFIED = 0 + TIME_PERIOD_COLUMN_DATE = 1 + TIME_PERIOD_COLUMN_WEEK = 2 + TIME_PERIOD_COLUMN_MONTH = 3 + TIME_PERIOD_COLUMN_QUARTER = 4 + + class Dimension(proto.Enum): + r"""Reporting dimensions. + + Values: + DIMENSION_UNSPECIFIED (0): + Default value. This value is unused. + ADVERTISER_DOMAIN_NAME (242): + The domain name of the advertiser. 
+ ADVERTISER_EXTERNAL_ID (228): + The ID used in an external system for + advertiser identification + ADVERTISER_ID (131): + The ID of an advertiser company assigned to + an order + ADVERTISER_LABELS (230): + Labels applied to the advertiser + can be used for either competitive exclusion or + ad exclusion + ADVERTISER_LABEL_IDS (229): + Label ids applied to the advertiser + can be used for either competitive exclusion or + ad exclusion + ADVERTISER_NAME (132): + The name of an advertiser company assigned to + an order + ADVERTISER_PRIMARY_CONTACT (227): + The name of the contact associated with an + advertiser company + AD_LOCATION (390): + Shows an ENUM value describing whether a + given piece of publisher inventory was above + (ATF) or below the fold (BTF) of a page. + AD_LOCATION_NAME (391): + Shows a localized string describing whether a + given piece of publisher inventory was above + (ATF) or below the fold (BTF) of a page. + AD_UNIT_CODE (64): + The code of the ad unit where the ad was + requested. + AD_UNIT_CODE_LEVEL_1 (65): + The code of the first level ad unit of the ad + unit where the ad was requested. + AD_UNIT_CODE_LEVEL_10 (74): + The code of the tenth level ad unit of the ad + unit where the ad was requested. + AD_UNIT_CODE_LEVEL_11 (75): + The code of the eleventh level ad unit of the + ad unit where the ad was requested. + AD_UNIT_CODE_LEVEL_12 (76): + The code of the twelfth level ad unit of the + ad unit where the ad was requested. + AD_UNIT_CODE_LEVEL_13 (77): + The code of the thirteenth level ad unit of + the ad unit where the ad was requested. + AD_UNIT_CODE_LEVEL_14 (78): + The code of the fourteenth level ad unit of + the ad unit where the ad was requested. + AD_UNIT_CODE_LEVEL_15 (79): + The code of the fifteenth level ad unit of + the ad unit where the ad was requested. + AD_UNIT_CODE_LEVEL_16 (80): + The code of the sixteenth level ad unit of + the ad unit where the ad was requested. 
+ AD_UNIT_CODE_LEVEL_2 (66): + The code of the second level ad unit of the + ad unit where the ad was requested. + AD_UNIT_CODE_LEVEL_3 (67): + The code of the third level ad unit of the ad + unit where the ad was requested. + AD_UNIT_CODE_LEVEL_4 (68): + The code of the fourth level ad unit of the + ad unit where the ad was requested. + AD_UNIT_CODE_LEVEL_5 (69): + The code of the fifth level ad unit of the ad + unit where the ad was requested. + AD_UNIT_CODE_LEVEL_6 (70): + The code of the sixth level ad unit of the ad + unit where the ad was requested. + AD_UNIT_CODE_LEVEL_7 (71): + The code of the seventh level ad unit of the + ad unit where the ad was requested. + AD_UNIT_CODE_LEVEL_8 (72): + The code of the eighth level ad unit of the + ad unit where the ad was requested. + AD_UNIT_CODE_LEVEL_9 (73): + The code of the ninth level ad unit of the ad + unit where the ad was requested. + AD_UNIT_DEPTH (101): + The depth of the ad unit's hierarchy + AD_UNIT_ID (25): + The ID of the ad unit where the ad was + requested. + AD_UNIT_ID_ALL_LEVEL (27): + The full hierarchy of ad unit IDs where the + ad was requested, from root to leaf, excluding + the root ad unit ID. + AD_UNIT_ID_LEVEL_1 (30): + The first level ad unit ID of the ad unit + where the ad was requested. + AD_UNIT_ID_LEVEL_10 (48): + The tenth level ad unit ID of the ad unit + where the ad was requested. + AD_UNIT_ID_LEVEL_11 (50): + The eleventh level ad unit ID of the ad unit + where the ad was requested. + AD_UNIT_ID_LEVEL_12 (52): + The twelfth level ad unit ID of the ad unit + where the ad was requested. + AD_UNIT_ID_LEVEL_13 (54): + The thirteenth level ad unit ID of the ad + unit where the ad was requested. + AD_UNIT_ID_LEVEL_14 (56): + The fourteenth level ad unit ID of the ad + unit where the ad was requested. + AD_UNIT_ID_LEVEL_15 (58): + The fifteenth level ad unit ID of the ad unit + where the ad was requested. 
+ AD_UNIT_ID_LEVEL_16 (60): + The sixteenth level ad unit ID of the ad unit + where the ad was requested. + AD_UNIT_ID_LEVEL_2 (32): + The second level ad unit ID of the ad unit + where the ad was requested. + AD_UNIT_ID_LEVEL_3 (34): + The third level ad unit ID of the ad unit + where the ad was requested. + AD_UNIT_ID_LEVEL_4 (36): + The fourth level ad unit ID of the ad unit + where the ad was requested. + AD_UNIT_ID_LEVEL_5 (38): + The fifth level ad unit ID of the ad unit + where the ad was requested. + AD_UNIT_ID_LEVEL_6 (40): + The sixth level ad unit ID of the ad unit + where the ad was requested. + AD_UNIT_ID_LEVEL_7 (42): + The seventh level ad unit ID of the ad unit + where the ad was requested. + AD_UNIT_ID_LEVEL_8 (44): + The eighth level ad unit ID of the ad unit + where the ad was requested. + AD_UNIT_ID_LEVEL_9 (46): + The ninth level ad unit ID of the ad unit + where the ad was requested. + AD_UNIT_ID_TOP_LEVEL (142): + The top-level ad unit ID of the ad unit where + the ad was requested. + AD_UNIT_NAME (26): + The name of the ad unit where the ad was + requested. + AD_UNIT_NAME_ALL_LEVEL (29): + The full hierarchy of ad unit names where the + ad was requested, from root to leaf, excluding + the root ad unit name. + AD_UNIT_NAME_LEVEL_1 (31): + The first level ad unit name of the ad unit + where the ad was requested. + AD_UNIT_NAME_LEVEL_10 (49): + The tenth level ad unit name of the ad unit + where the ad was requested. + AD_UNIT_NAME_LEVEL_11 (51): + The eleventh level ad unit name of the ad + unit where the ad was requested. + AD_UNIT_NAME_LEVEL_12 (53): + The twelfth level ad unit name of the ad unit + where the ad was requested. + AD_UNIT_NAME_LEVEL_13 (55): + The thirteenth level ad unit name of the ad + unit where the ad was requested. + AD_UNIT_NAME_LEVEL_14 (57): + The fourteenth level ad unit name of the ad + unit where the ad was requested. 
+ AD_UNIT_NAME_LEVEL_15 (59): + The fifteenth level ad unit name of the ad + unit where the ad was requested. + AD_UNIT_NAME_LEVEL_16 (61): + The sixteenth level ad unit name of the ad + unit where the ad was requested. + AD_UNIT_NAME_LEVEL_2 (33): + The second level ad unit name of the ad unit + where the ad was requested. + AD_UNIT_NAME_LEVEL_3 (35): + The third level ad unit name of the ad unit + where the ad was requested. + AD_UNIT_NAME_LEVEL_4 (37): + The fourth level ad unit name of the ad unit + where the ad was requested. + AD_UNIT_NAME_LEVEL_5 (39): + The fifth level ad unit name of the ad unit + where the ad was requested. + AD_UNIT_NAME_LEVEL_6 (41): + The sixth level ad unit name of the ad unit + where the ad was requested. + AD_UNIT_NAME_LEVEL_7 (43): + The seventh level ad unit name of the ad unit + where the ad was requested. + AD_UNIT_NAME_LEVEL_8 (45): + The eighth level ad unit name of the ad unit + where the ad was requested. + AD_UNIT_NAME_LEVEL_9 (47): + The ninth level ad unit name of the ad unit + where the ad was requested. + AD_UNIT_NAME_TOP_LEVEL (143): + The top-level ad unit name of the ad unit + where the ad was requested. + AD_UNIT_REWARD_AMOUNT (63): + The reward amount of the ad unit where the ad + was requested. + AD_UNIT_REWARD_TYPE (62): + The reward type of the ad unit where the ad + was requested. + AD_UNIT_STATUS (206): + The status of the ad unit + AD_UNIT_STATUS_NAME (207): + The name of the status of the ad unit + APP_VERSION (392): + The app version. 
+ BACKFILL_ADVERTISER_EXTERNAL_ID (349): + The ID used in an external system for + advertiser identification + BACKFILL_ADVERTISER_ID (346): + The ID of an advertiser company assigned to a + backfill order + BACKFILL_ADVERTISER_LABELS (351): + Labels applied to the advertiser + can be used for either competitive exclusion or + ad exclusion + BACKFILL_ADVERTISER_LABEL_IDS (350): + Label ids applied to the advertiser + can be used for either competitive exclusion or + ad exclusion + BACKFILL_ADVERTISER_NAME (347): + The name of an advertiser company assigned to + a backfill order + BACKFILL_ADVERTISER_PRIMARY_CONTACT (348): + The name of the contact associated with an + advertiser company + BACKFILL_CREATIVE_BILLING_TYPE (378): + Enum value of Backfill creative billing type + BACKFILL_CREATIVE_BILLING_TYPE_NAME (379): + Localized string value of Backfill creative + billing type + BACKFILL_CREATIVE_CLICK_THROUGH_URL (376): + Represents the click-through URL of a + Backfill creative + BACKFILL_CREATIVE_ID (370): + The ID of a Backfill creative + BACKFILL_CREATIVE_NAME (371): + Backfill creative name + BACKFILL_CREATIVE_THIRD_PARTY_VENDOR (377): + Third party vendor name of a Backfill + creative + BACKFILL_CREATIVE_TYPE (374): + Enum value of Backfill creative type + BACKFILL_CREATIVE_TYPE_NAME (375): + Localized string name of Backfill creative + type + BACKFILL_LINE_ITEM_ARCHIVED (278): + Whether a Backfill line item is archived. + BACKFILL_LINE_ITEM_COMPANION_DELIVERY_OPTION (258): + Backfill line item comanion delivery option + ENUM value. + BACKFILL_LINE_ITEM_COMPANION_DELIVERY_OPTION_NAME (259): + Localized Backfill line item comanion + delivery option name. + BACKFILL_LINE_ITEM_COMPUTED_STATUS (296): + The computed status of the BackfillLineItem. + BACKFILL_LINE_ITEM_COMPUTED_STATUS_NAME (297): + The localized name of the computed status of + the BackfillLineItem. 
+ BACKFILL_LINE_ITEM_CONTRACTED_QUANTITY (280): + The contracted units bought for the Backfill + line item. + BACKFILL_LINE_ITEM_COST_PER_UNIT (272): + The cost per unit of the Backfill line item. + BACKFILL_LINE_ITEM_COST_TYPE (264): + Backfill line item cost type ENUM value. + BACKFILL_LINE_ITEM_COST_TYPE_NAME (265): + Localized Backfill line item cost type name. + BACKFILL_LINE_ITEM_CREATIVE_END_DATE (381): + Represent the end date of a Backfill creative + associated with a Backfill line item + BACKFILL_LINE_ITEM_CREATIVE_ROTATION_TYPE (290): + The creative rotation type of the + BackfillLineItem. + BACKFILL_LINE_ITEM_CREATIVE_ROTATION_TYPE_NAME (291): + The localized name of the creative rotation + type of the BackfillLineItem. + BACKFILL_LINE_ITEM_CREATIVE_START_DATE (380): + Represent the start date of a Backfill + creative associated with a Backfill line item + BACKFILL_LINE_ITEM_CURRENCY_CODE (288): + The 3 letter currency code of the Backfill + line item + BACKFILL_LINE_ITEM_DELIVERY_INDICATOR (274): + The progress made for the delivery of the + Backfill line item. + BACKFILL_LINE_ITEM_DELIVERY_RATE_TYPE (292): + The delivery rate type of the + BackfillLineItem. + BACKFILL_LINE_ITEM_DELIVERY_RATE_TYPE_NAME (293): + The localized name of the delivery rate type + of the BackfillLineItem. + BACKFILL_LINE_ITEM_DISCOUNT_ABSOLUTE (294): + The discount of the BackfillLineItem in whole + units in the BackfillLineItem's currency code, + or if unspecified the Network's currency code. + BACKFILL_LINE_ITEM_DISCOUNT_PERCENTAGE (295): + The discount of the BackfillLineItem in + percentage. + BACKFILL_LINE_ITEM_END_DATE (267): + The end date of the Backfill line item. + BACKFILL_LINE_ITEM_END_DATE_TIME (269): + The end date and time of the Backfill line + item. + BACKFILL_LINE_ITEM_ENVIRONMENT_TYPE (302): + The ENUM value of the environment a Backfill + line item is targeting. 
+ BACKFILL_LINE_ITEM_ENVIRONMENT_TYPE_NAME (257): + The localized name of the environment a + Backfill line item is targeting. + BACKFILL_LINE_ITEM_EXTERNAL_DEAL_ID (285): + The deal ID of the Backfill line item. Set + for Programmatic Direct campaigns. + BACKFILL_LINE_ITEM_EXTERNAL_ID (273): + The external ID of the Backfill line item. + BACKFILL_LINE_ITEM_FREQUENCY_CAP (303): + The frequency cap of the Backfill line item + (descriptive string). + BACKFILL_LINE_ITEM_ID (298): + Backfill line item ID. + BACKFILL_LINE_ITEM_LAST_MODIFIED_BY_APP (289): + The application that last modified the + Backfill line item. + BACKFILL_LINE_ITEM_LIFETIME_CLICKS (283): + The total number of clicks delivered of the + lifetime of the Backfill line item. + BACKFILL_LINE_ITEM_LIFETIME_IMPRESSIONS (282): + The total number of impressions delivered + over the lifetime of the Backfill line item. + BACKFILL_LINE_ITEM_LIFETIME_VIEWABLE_IMPRESSIONS (284): + The total number of viewable impressions + delivered over the lifetime of the Backfill line + item. + BACKFILL_LINE_ITEM_MAKEGOOD (276): + Whether or not the Backfill line item is + Makegood. Makegood refers to free inventory + offered to buyers to compensate for mistakes or + under-delivery in the original campaigns. + BACKFILL_LINE_ITEM_NAME (299): + Backfill line item name. + BACKFILL_LINE_ITEM_NON_CPD_BOOKED_REVENUE (286): + The cost of booking for the Backfill line + item (non-CPD). + BACKFILL_LINE_ITEM_OPTIMIZABLE (277): + Whether a Backfill line item is eligible for + opitimization. + BACKFILL_LINE_ITEM_PRIMARY_GOAL_TYPE (262): + Goal type ENUM value of the primary goal of + the Backfill line item. + BACKFILL_LINE_ITEM_PRIMARY_GOAL_TYPE_NAME (263): + Localized goal type name of the primary goal + of the Backfill line item. + BACKFILL_LINE_ITEM_PRIMARY_GOAL_UNIT_TYPE (260): + Unit type ENUM value of the primary goal of + the Backfill line item. 
+ BACKFILL_LINE_ITEM_PRIMARY_GOAL_UNIT_TYPE_NAME (261): + Localized unit type name of the primary goal + of the Backfill line item. + BACKFILL_LINE_ITEM_PRIORITY (266): + The priority of this Backfill line item as a + value between 1 and 16. In general, a lower + priority means more serving priority for the + Backfill line item. + BACKFILL_LINE_ITEM_RESERVATION_STATUS (306): + ENUM value describing the state of inventory + reservation for the BackfillLineItem. + BACKFILL_LINE_ITEM_RESERVATION_STATUS_NAME (307): + Localized string describing the state of + inventory reservation for the BackfillLineItem. + BACKFILL_LINE_ITEM_START_DATE (268): + The start date of the Backfill line item. + BACKFILL_LINE_ITEM_START_DATE_TIME (270): + The start date and time of the Backfill line + item. + BACKFILL_LINE_ITEM_TYPE (300): + Backfill line item type ENUM value. + BACKFILL_LINE_ITEM_TYPE_NAME (301): + Localized Backfill line item type name. + BACKFILL_LINE_ITEM_UNLIMITED_END (271): + Whether the Backfill line item end time and + end date is set to effectively never end. + BACKFILL_LINE_ITEM_VALUE_COST_PER_UNIT (275): + The artificial cost per unit used by the Ad + server to help rank inventory. + BACKFILL_LINE_ITEM_WEB_PROPERTY_CODE (287): + The web property code used for dynamic + allocation Backfill line items. + BACKFILL_MASTER_COMPANION_CREATIVE_ID (372): + The ID of Backfill creative, includes regular + creatives, and master and companions in case of + creative sets + BACKFILL_MASTER_COMPANION_CREATIVE_NAME (373): + Name of Backfill creative, includes regular + creatives, and master and companions in case of + creative sets + BACKFILL_ORDER_AGENCY (313): + Backfill order agency. + BACKFILL_ORDER_AGENCY_ID (314): + Backfill order agency ID. + BACKFILL_ORDER_BOOKED_CPC (315): + Backfill order booked CPC. + BACKFILL_ORDER_BOOKED_CPM (316): + Backfill order booked CPM. + BACKFILL_ORDER_DELIVERY_STATUS (340): + Backfill order delivery status ENUM value. 
+ BACKFILL_ORDER_DELIVERY_STATUS_NAME (341): + Backfill order delivery status localized + name. + BACKFILL_ORDER_END_DATE (317): + Backfill order end date. + BACKFILL_ORDER_END_DATE_TIME (319): + Backfill order end date and time. + BACKFILL_ORDER_EXTERNAL_ID (320): + Backfill order external ID. + BACKFILL_ORDER_ID (338): + Backfill order id. + BACKFILL_ORDER_LABELS (334): + Backfill order labels. + BACKFILL_ORDER_LABEL_IDS (335): + Backfill order labels IDs. + BACKFILL_ORDER_LIFETIME_CLICKS (322): + Backfill order lifetime clicks. + BACKFILL_ORDER_LIFETIME_IMPRESSIONS (323): + Backfill order lifetime impressions. + BACKFILL_ORDER_NAME (339): + Backfill order name. + BACKFILL_ORDER_PO_NUMBER (324): + Backfill order PO number. + BACKFILL_ORDER_PROGRAMMATIC (321): + Whether the Backfill order is programmatic. + BACKFILL_ORDER_SALESPERSON (325): + Backfill order sales person. + BACKFILL_ORDER_SECONDARY_SALESPEOPLE (329): + Backfill order secondary sales people. + BACKFILL_ORDER_SECONDARY_SALESPEOPLE_ID (328): + Backfill order secondary sales people ID. + BACKFILL_ORDER_SECONDARY_TRAFFICKERS (331): + Backfill order secondary traffickers. + BACKFILL_ORDER_SECONDARY_TRAFFICKERS_ID (330): + Backfill order secondary traffickers ID. + BACKFILL_ORDER_START_DATE (332): + Backfill order start date. + BACKFILL_ORDER_START_DATE_TIME (333): + Backfill order start date and time. + BACKFILL_ORDER_TRAFFICKER (326): + Backfill order trafficker. + BACKFILL_ORDER_TRAFFICKER_ID (327): + Backfill order trafficker ID. + BACKFILL_ORDER_UNLIMITED_END (318): + Whether the Backfill order end time and end + date is set to effectively never end. + BACKFILL_PROGRAMMATIC_BUYER_ID (336): + The ID of the buyer on a backfill + programmatic proposal. + BACKFILL_PROGRAMMATIC_BUYER_NAME (337): + The name of the buyer on a backfill + programmatic proposal. + BRANDING_TYPE (383): + The amount of information about the + Publisher's page sent to the buyer who purchased + the impressions. 
+ BRANDING_TYPE_NAME (384): + The localized version of branding type, the + amount of information about the Publisher's page + sent to the buyer who purchased the impressions. + BROWSER_CATEGORY (119): + Browser category. + BROWSER_CATEGORY_NAME (120): + Browser category name. + BROWSER_ID (235): + The ID of the browser. + BROWSER_NAME (236): + The name of the browser. + CARRIER_ID (369): + Mobile carrier ID. + CARRIER_NAME (368): + Name of the mobile carrier. + CLASSIFIED_ADVERTISER_ID (133): + The ID of an advertiser, classified by + Google, associated with a creative transacted + CLASSIFIED_ADVERTISER_NAME (134): + The name of an advertiser, classified by + Google, associated with a creative transacted + CLASSIFIED_BRAND_ID (243): + ID of the brand, as classified by Google, + CLASSIFIED_BRAND_NAME (244): + Name of the brand, as classified by Google, + CONTENT_ID (246): + ID of the video content served. + CONTENT_NAME (247): + Name of the video content served. + COUNTRY_ID (11): + The criteria ID of the country in which the + ad served. + COUNTRY_NAME (12): + The name of the country in which the ad + served. + CREATIVE_BILLING_TYPE (366): + Enum value of creative billing type + CREATIVE_BILLING_TYPE_NAME (367): + Localized string value of creative billing + type + CREATIVE_CLICK_THROUGH_URL (174): + Represents the click-through URL of a + creative + CREATIVE_ID (138): + The ID of a creative + CREATIVE_NAME (139): + Creative name + CREATIVE_TECHNOLOGY (148): + Creative technology ENUM + CREATIVE_TECHNOLOGY_NAME (149): + Creative technology locallized name + CREATIVE_THIRD_PARTY_VENDOR (361): + Third party vendor name of a creative + CREATIVE_TYPE (344): + Enum value of creative type + CREATIVE_TYPE_NAME (345): + Localized string name of creative type + DATE (3): + Breaks down reporting data by date. + DAY_OF_WEEK (4): + Breaks down reporting data by day of the + week. Monday is 1 and 7 is Sunday. + DEMAND_CHANNEL (9): + Demand channel. 
+ DEMAND_CHANNEL_NAME (10): + Demand channel name. + DEMAND_SUBCHANNEL (22): + Demand subchannel. + DEMAND_SUBCHANNEL_NAME (23): + Demand subchannel name. + DEVICE (226): + The device on which an ad was served. + DEVICE_CATEGORY (15): + The device category to which an ad is being + targeted. + DEVICE_CATEGORY_NAME (16): + The name of the category of device + (smartphone, feature phone, tablet, or desktop) + to which an ad is being targeted. + DEVICE_NAME (225): + The localized name of the device on which an + ad was served. + EXCHANGE_THIRD_PARTY_COMPANY_ID (185): + ID of the yield partner as classified by + Google + EXCHANGE_THIRD_PARTY_COMPANY_NAME (186): + Name of the yield partner as classified by + Google + FIRST_LOOK_PRICING_RULE_ID (248): + The ID of the first look pricing rule. + FIRST_LOOK_PRICING_RULE_NAME (249): + The name of the first look pricing rule. + HOUR (100): + Breaks down reporting data by hour in one + day. + INTERACTION_TYPE (223): + The interaction type of an ad. + INTERACTION_TYPE_NAME (224): + The localized name of the interaction type of + an ad. + INVENTORY_FORMAT (17): + Inventory format. + The format of the ad unit (e.g, banner) where + the ad was requested. + INVENTORY_FORMAT_NAME (18): + Inventory format name. + The format of the ad unit (e.g, banner) where + the ad was requested. + INVENTORY_TYPE (19): + Inventory type. + The kind of web page or device where the ad was + requested. + INVENTORY_TYPE_NAME (20): + Inventory type name. + The kind of web page or device where the ad was + requested. + IS_ADX_DIRECT (382): + Whether traffic is Adx Direct. + IS_FIRST_LOOK_DEAL (401): + Whether traffic is First Look. + KEY_VALUES_ID (214): + The Custom Targeting Value ID + KEY_VALUES_NAME (215): + The Custom Targeting Value formatted like + = + LINE_ITEM_ARCHIVED (188): + Whether a Line item is archived. + LINE_ITEM_COMPANION_DELIVERY_OPTION (204): + Line item comanion delivery option ENUM + value. 
+ LINE_ITEM_COMPANION_DELIVERY_OPTION_NAME (205): + Localized line item comanion delivery option + name. + LINE_ITEM_COMPUTED_STATUS (250): + The computed status of the LineItem. + LINE_ITEM_COMPUTED_STATUS_NAME (251): + The localized name of the computed status of + the LineItem. + LINE_ITEM_CONTRACTED_QUANTITY (92): + The contracted units bought for the Line + item. + LINE_ITEM_COST_PER_UNIT (85): + The cost per unit of the Line item. + LINE_ITEM_COST_TYPE (212): + Line item cost type ENUM value. + LINE_ITEM_COST_TYPE_NAME (213): + Localized line item cost type name. + LINE_ITEM_CREATIVE_END_DATE (176): + Represent the end date of a creative + associated with line item + LINE_ITEM_CREATIVE_ROTATION_TYPE (189): + The creative rotation type of the LineItem. + LINE_ITEM_CREATIVE_ROTATION_TYPE_NAME (190): + The localized name of the creative rotation + type of the LineItem. + LINE_ITEM_CREATIVE_START_DATE (175): + Represent the start date of a creative + associated with line item + LINE_ITEM_CURRENCY_CODE (180): + The 3 letter currency code of the Line Item + LINE_ITEM_DELIVERY_INDICATOR (87): + The progress made for the delivery of the + Line item. + LINE_ITEM_DELIVERY_RATE_TYPE (191): + The delivery rate type of the LineItem. + LINE_ITEM_DELIVERY_RATE_TYPE_NAME (192): + The localized name of the delivery rate type + of the LineItem. + LINE_ITEM_DISCOUNT_ABSOLUTE (195): + The discount of the LineItem in whole units + in the LineItem's currency code, or if + unspecified the Network's currency code. + LINE_ITEM_DISCOUNT_PERCENTAGE (196): + The discount of the LineItem in percentage. + LINE_ITEM_END_DATE (81): + The end date of the Line item. + LINE_ITEM_END_DATE_TIME (83): + The end date and time of the Line item. + LINE_ITEM_ENVIRONMENT_TYPE (201): + The ENUM value of the environment a LineItem + is targeting. + LINE_ITEM_ENVIRONMENT_TYPE_NAME (202): + The localized name of the environment a + LineItem is targeting. 
+ LINE_ITEM_EXTERNAL_DEAL_ID (97): + The deal ID of the Line item. Set for + Programmatic Direct campaigns. + LINE_ITEM_EXTERNAL_ID (86): + The external ID of the Line item. + LINE_ITEM_FREQUENCY_CAP (256): + The frequency cap of the Line item + (descriptive string). + LINE_ITEM_ID (1): + Line item ID. + LINE_ITEM_LAST_MODIFIED_BY_APP (181): + The application that last modified the Line + Item. + LINE_ITEM_LIFETIME_CLICKS (95): + The total number of clicks delivered of the + lifetime of the Line item. + LINE_ITEM_LIFETIME_IMPRESSIONS (94): + The total number of impressions delivered + over the lifetime of the Line item. + LINE_ITEM_LIFETIME_VIEWABLE_IMPRESSIONS (96): + The total number of viewable impressions + delivered over the lifetime of the Line item. + LINE_ITEM_MAKEGOOD (89): + Whether or not the Line item is Makegood. + Makegood refers to free inventory offered to + buyers to compensate for mistakes or + under-delivery in the original campaigns. + LINE_ITEM_NAME (2): + Line item Name. + LINE_ITEM_NON_CPD_BOOKED_REVENUE (98): + The cost of booking for the Line item + (non-CPD). + LINE_ITEM_OPTIMIZABLE (90): + Whether a Line item is eligible for + opitimization. + LINE_ITEM_PRIMARY_GOAL_TYPE (210): + Goal type ENUM value of the primary goal of + the line item. + LINE_ITEM_PRIMARY_GOAL_TYPE_NAME (211): + Localized goal type name of the primary goal + of the line item. + LINE_ITEM_PRIMARY_GOAL_UNITS_ABSOLUTE (93): + The total number of impressions or clicks that are reserved + for a line item. For line items of type BULK or + PRICE_PRIORITY, this represents the number of remaining + impressions reserved. If the line item has an impression cap + goal, this represents the number of impressions or + conversions that the line item will stop serving at if + reached. + LINE_ITEM_PRIMARY_GOAL_UNITS_PERCENTAGE (396): + The percentage of impressions or clicks that + are reserved for a line item. 
For line items of + type SPONSORSHIP, this represents the percentage + of available impressions reserved. For line + items of type NETWORK or HOUSE, this represents + the percentage of remaining impressions + reserved. + LINE_ITEM_PRIMARY_GOAL_UNIT_TYPE (208): + Unit type ENUM value of the primary goal of + the line item. + LINE_ITEM_PRIMARY_GOAL_UNIT_TYPE_NAME (209): + Localized unit type name of the primary goal + of the line item. + LINE_ITEM_PRIORITY (24): + The priority of this Line item as a value + between 1 and 16. In general, a lower priority + means more serving priority for the Line item. + LINE_ITEM_RESERVATION_STATUS (304): + ENUM value describing the state of inventory + reservation for the LineItem. + LINE_ITEM_RESERVATION_STATUS_NAME (305): + Localized string describing the state of + inventory reservation for the LineItem. + LINE_ITEM_START_DATE (82): + The start date of the Line item. + LINE_ITEM_START_DATE_TIME (84): + The start date and time of the Line item. + LINE_ITEM_TYPE (193): + Line item type ENUM value. + LINE_ITEM_TYPE_NAME (194): + Localized line item type name. + LINE_ITEM_UNLIMITED_END (187): + Whether the Line item end time and end date + is set to effectively never end. + LINE_ITEM_VALUE_COST_PER_UNIT (88): + The artificial cost per unit used by the Ad + server to help rank inventory. + LINE_ITEM_WEB_PROPERTY_CODE (179): + The web property code used for dynamic + allocation Line Items. + MASTER_COMPANION_CREATIVE_ID (140): + The ID of creative, includes regular + creatives, and master and companions in case of + creative sets + MASTER_COMPANION_CREATIVE_NAME (141): + Name of creative, includes regular creatives, + and master and companions in case of creative + sets + MOBILE_APP_FREE (128): + Whether the mobile app is free. + MOBILE_APP_ICON_URL (129): + URL of app icon for the mobile app. + MOBILE_APP_ID (123): + The ID of the Mobile App. + MOBILE_APP_NAME (127): + The name of the mobile app. 
+ MOBILE_APP_OWNERSHIP_STATUS (311): + Ownership status of the mobile app. + MOBILE_APP_OWNERSHIP_STATUS_NAME (312): + Ownership status of the mobile app. + MOBILE_APP_STORE (125): + The App Store of the mobile app. + MOBILE_APP_STORE_NAME (245): + The localized name of the mobile app store. + MOBILE_INVENTORY_TYPE (99): + Mobile inventory type. + Identifies whether a mobile ad came from a + regular web page, an AMP web page, or a mobile + app. + Values match the Inventory type dimension + available in the Overview Home dashboard. Note: + Video takes precedence over any other value, for + example, if there is an in-stream video + impression on a desktop device, it will be + attributed to in-stream video and not desktop + web. + MOBILE_INVENTORY_TYPE_NAME (21): + Mobile inventory type name. + Identifies whether a mobile ad came from a + regular web page, an AMP web page, or a mobile + app. + MOBILE_SDK_VERSION_NAME (130): + SDK version of the mobile device. + MONTH_YEAR (6): + Breaks down reporting data by month and year. + NATIVE_AD_FORMAT_ID (255): + Native ad format ID. + NATIVE_AD_FORMAT_NAME (254): + Native ad format name. + NATIVE_STYLE_ID (253): + Native style ID. + NATIVE_STYLE_NAME (252): + Native style name. + OPERATING_SYSTEM_CATEGORY (117): + Operating system category. + OPERATING_SYSTEM_CATEGORY_NAME (118): + Operating system category name. + OPERATING_SYSTEM_VERSION_ID (238): + ID of the operating system version. + OPERATING_SYSTEM_VERSION_NAME (237): + Details of the operating system, including + version. + ORDER_AGENCY (150): + Order agency. + ORDER_AGENCY_ID (151): + Order agency ID. + ORDER_BOOKED_CPC (152): + Order booked CPC. + ORDER_BOOKED_CPM (153): + Order booked CPM. + ORDER_DELIVERY_STATUS (231): + Order delivery status ENUM value. + ORDER_DELIVERY_STATUS_NAME (239): + Order delivery status localized name. + ORDER_END_DATE (154): + Order end date. + ORDER_END_DATE_TIME (155): + Order end date and time. 
+ ORDER_EXTERNAL_ID (156): + Order external ID. + ORDER_ID (7): + Order id. + ORDER_LABELS (170): + Order labels. + ORDER_LABEL_IDS (171): + Order labels IDs. + ORDER_LIFETIME_CLICKS (158): + Order lifetime clicks. + ORDER_LIFETIME_IMPRESSIONS (159): + Order lifetime impressions. + ORDER_NAME (8): + Order name. + ORDER_PO_NUMBER (160): + Order PO number. + ORDER_PROGRAMMATIC (157): + Whether the Order is programmatic. + ORDER_SALESPERSON (161): + Order sales person. + ORDER_SECONDARY_SALESPEOPLE (164): + Order secondary sales people. + ORDER_SECONDARY_SALESPEOPLE_ID (165): + Order secondary sales people ID. + ORDER_SECONDARY_TRAFFICKERS (166): + Order secondary traffickers. + ORDER_SECONDARY_TRAFFICKERS_ID (167): + Order secondary traffickers ID. + ORDER_START_DATE (168): + Order start date. + ORDER_START_DATE_TIME (169): + Order start date and time. + ORDER_TRAFFICKER (162): + Order trafficker. + ORDER_TRAFFICKER_ID (163): + Order trafficker ID. + ORDER_UNLIMITED_END (203): + Whether the Order end time and end date is + set to effectively never end. + PLACEMENT_ID (113): + Placement ID + PLACEMENT_ID_ALL (144): + The full list of placement IDs associated + with the ad unit. + PLACEMENT_NAME (114): + Placement name + PLACEMENT_NAME_ALL (145): + The full list of placement names associated + with the ad unit. + PLACEMENT_STATUS (362): + Placement status ENUM value + PLACEMENT_STATUS_ALL (363): + The full list of placement status ENUM values + associated with the ad unit. + PLACEMENT_STATUS_NAME (364): + Localized placement status name. + PLACEMENT_STATUS_NAME_ALL (365): + The full list of localized placement status + names associated with the ad unit. + PROGRAMMATIC_BUYER_ID (240): + The ID of the buyer on a programmatic + proposal. + PROGRAMMATIC_BUYER_NAME (241): + The name of the buyer on a programmatic + proposal. + PROGRAMMATIC_CHANNEL (13): + Programmatic channel. + The type of transaction that occurred in Ad + Exchange. 
+ PROGRAMMATIC_CHANNEL_NAME (14): + Programmatic channel name. + The type of transaction that occurred in Ad + Exchange. + RENDERED_CREATIVE_SIZE (343): + The size of a rendered creative. It can + differ from the creative's size if a creative is + shown in an ad slot of a different size. + REQUESTED_AD_SIZES (352): + Inventory Requested Ad Sizes dimension + REQUEST_TYPE (146): + Request type ENUM + REQUEST_TYPE_NAME (147): + Request type localized name + SITE (387): + Information about domain or subdomains. + TARGETING_ID (232): + The ID of the browser, device or other + environment into which a line item or creative + was served. + TARGETING_NAME (233): + Information about the browser, device and + other environments into which a line item or + creative was served. + TARGETING_TYPE (385): + The way in which advertisers targeted their + ads. + TARGETING_TYPE_NAME (386): + The localized name of the way in which + advertisers targeted their ads. + TRAFFIC_SOURCE (388): + Inventory Traffic source dimension + TRAFFIC_SOURCE_NAME (389): + Inventory Traffic source dimension name + UNIFIED_PRICING_RULE_ID (393): + Unified pricing rule ID dimension + UNIFIED_PRICING_RULE_NAME (394): + Unified pricing rule name dimension + VIDEO_PLCMT (172): + The video placement enum as defined by ADCOM + 1.0-202303. + VIDEO_PLCMT_NAME (173): + The localized name of the video placement as + defined by ADCOM 1.0-202303. + WEEK (5): + Breaks down reporting data by week of the + year. + YIELD_GROUP_BUYER_NAME (184): + Name of the company within a yield group + YIELD_GROUP_ID (182): + ID of the group of ad networks or exchanges + used for Mediation and Open Bidding + YIELD_GROUP_NAME (183): + Name of the group of ad networks or exchanges + used for Mediation and Open Bidding + LINE_ITEM_CUSTOM_FIELD_0_OPTION_ID (10000): + Custom field option ID for Line Item with custom field ID + equal to the ID in index 0 of + ``ReportDefinition.line_item_custom_field_ids``. 
+ LINE_ITEM_CUSTOM_FIELD_1_OPTION_ID (10001): + Custom field option ID for Line Item with custom field ID + equal to the ID in index 1 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_2_OPTION_ID (10002): + Custom field option ID for Line Item with custom field ID + equal to the ID in index 2 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_3_OPTION_ID (10003): + Custom field option ID for Line Item with custom field ID + equal to the ID in index 3 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_4_OPTION_ID (10004): + Custom field option ID for Line Item with custom field ID + equal to the ID in index 4 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_5_OPTION_ID (10005): + Custom field option ID for Line Item with custom field ID + equal to the ID in index 5 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_6_OPTION_ID (10006): + Custom field option ID for Line Item with custom field ID + equal to the ID in index 6 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_7_OPTION_ID (10007): + Custom field option ID for Line Item with custom field ID + equal to the ID in index 7 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_8_OPTION_ID (10008): + Custom field option ID for Line Item with custom field ID + equal to the ID in index 8 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_9_OPTION_ID (10009): + Custom field option ID for Line Item with custom field ID + equal to the ID in index 9 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_10_OPTION_ID (10010): + Custom field option ID for Line Item with custom field ID + equal to the ID in index 10 of + ``ReportDefinition.line_item_custom_field_ids``. 
+ LINE_ITEM_CUSTOM_FIELD_11_OPTION_ID (10011): + Custom field option ID for Line Item with custom field ID + equal to the ID in index 11 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_12_OPTION_ID (10012): + Custom field option ID for Line Item with custom field ID + equal to the ID in index 12 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_13_OPTION_ID (10013): + Custom field option ID for Line Item with custom field ID + equal to the ID in index 13 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_14_OPTION_ID (10014): + Custom field option ID for Line Item with custom field ID + equal to the ID in index 14 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_0_VALUE (11000): + Custom field value for Line Item with custom field ID equal + to the ID in index 0 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_1_VALUE (11001): + Custom field value for Line Item with custom field ID equal + to the ID in index 1 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_2_VALUE (11002): + Custom field value for Line Item with custom field ID equal + to the ID in index 2 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_3_VALUE (11003): + Custom field value for Line Item with custom field ID equal + to the ID in index 3 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_4_VALUE (11004): + Custom field value for Line Item with custom field ID equal + to the ID in index 4 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_5_VALUE (11005): + Custom field value for Line Item with custom field ID equal + to the ID in index 5 of + ``ReportDefinition.line_item_custom_field_ids``. 
+ LINE_ITEM_CUSTOM_FIELD_6_VALUE (11006): + Custom field value for Line Item with custom field ID equal + to the ID in index 6 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_7_VALUE (11007): + Custom field value for Line Item with custom field ID equal + to the ID in index 7 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_8_VALUE (11008): + Custom field value for Line Item with custom field ID equal + to the ID in index 8 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_9_VALUE (11009): + Custom field value for Line Item with custom field ID equal + to the ID in index 9 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_10_VALUE (11010): + Custom field value for Line Item with custom field ID equal + to the ID in index 10 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_11_VALUE (11011): + Custom field value for Line Item with custom field ID equal + to the ID in index 11 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_12_VALUE (11012): + Custom field value for Line Item with custom field ID equal + to the ID in index 12 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_13_VALUE (11013): + Custom field value for Line Item with custom field ID equal + to the ID in index 13 of + ``ReportDefinition.line_item_custom_field_ids``. + LINE_ITEM_CUSTOM_FIELD_14_VALUE (11014): + Custom field value for Line Item with custom field ID equal + to the ID in index 14 of + ``ReportDefinition.line_item_custom_field_ids``. + ORDER_CUSTOM_FIELD_0_OPTION_ID (12000): + Custom field option ID for Order with custom field ID equal + to the ID in index 0 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_1_OPTION_ID (12001): + Custom field option ID for Order with custom field ID equal + to the ID in index 1 of + ``ReportDefinition.order_custom_field_ids``. 
+ ORDER_CUSTOM_FIELD_2_OPTION_ID (12002): + Custom field option ID for Order with custom field ID equal + to the ID in index 2 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_3_OPTION_ID (12003): + Custom field option ID for Order with custom field ID equal + to the ID in index 3 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_4_OPTION_ID (12004): + Custom field option ID for Order with custom field ID equal + to the ID in index 4 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_5_OPTION_ID (12005): + Custom field option ID for Order with custom field ID equal + to the ID in index 5 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_6_OPTION_ID (12006): + Custom field option ID for Order with custom field ID equal + to the ID in index 6 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_7_OPTION_ID (12007): + Custom field option ID for Order with custom field ID equal + to the ID in index 7 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_8_OPTION_ID (12008): + Custom field option ID for Order with custom field ID equal + to the ID in index 8 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_9_OPTION_ID (12009): + Custom field option ID for Order with custom field ID equal + to the ID in index 9 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_10_OPTION_ID (12010): + Custom field option ID for Order with custom field ID equal + to the ID in index 10 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_11_OPTION_ID (12011): + Custom field option ID for Order with custom field ID equal + to the ID in index 11 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_12_OPTION_ID (12012): + Custom field option ID for Order with custom field ID equal + to the ID in index 12 of + ``ReportDefinition.order_custom_field_ids``. 
+ ORDER_CUSTOM_FIELD_13_OPTION_ID (12013): + Custom field option ID for Order with custom field ID equal + to the ID in index 13 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_14_OPTION_ID (12014): + Custom field option ID for Order with custom field ID equal + to the ID in index 14 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_0_VALUE (13000): + Custom field value for Order with custom field ID equal to + the ID in index 0 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_1_VALUE (13001): + Custom field value for Order with custom field ID equal to + the ID in index 1 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_2_VALUE (13002): + Custom field value for Order with custom field ID equal to + the ID in index 2 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_3_VALUE (13003): + Custom field value for Order with custom field ID equal to + the ID in index 3 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_4_VALUE (13004): + Custom field value for Order with custom field ID equal to + the ID in index 4 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_5_VALUE (13005): + Custom field value for Order with custom field ID equal to + the ID in index 5 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_6_VALUE (13006): + Custom field value for Order with custom field ID equal to + the ID in index 6 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_7_VALUE (13007): + Custom field value for Order with custom field ID equal to + the ID in index 7 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_8_VALUE (13008): + Custom field value for Order with custom field ID equal to + the ID in index 8 of + ``ReportDefinition.order_custom_field_ids``. 
+ ORDER_CUSTOM_FIELD_9_VALUE (13009): + Custom field value for Order with custom field ID equal to + the ID in index 9 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_10_VALUE (13010): + Custom field value for Order with custom field ID equal to + the ID in index 10 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_11_VALUE (13011): + Custom field value for Order with custom field ID equal to + the ID in index 11 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_12_VALUE (13012): + Custom field value for Order with custom field ID equal to + the ID in index 12 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_13_VALUE (13013): + Custom field value for Order with custom field ID equal to + the ID in index 13 of + ``ReportDefinition.order_custom_field_ids``. + ORDER_CUSTOM_FIELD_14_VALUE (13014): + Custom field value for Order with custom field ID equal to + the ID in index 14 of + ``ReportDefinition.order_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_0_OPTION_ID (14000): + Custom field option ID for Creative with custom field ID + equal to the ID in index 0 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_1_OPTION_ID (14001): + Custom field option ID for Creative with custom field ID + equal to the ID in index 1 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_2_OPTION_ID (14002): + Custom field option ID for Creative with custom field ID + equal to the ID in index 2 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_3_OPTION_ID (14003): + Custom field option ID for Creative with custom field ID + equal to the ID in index 3 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_4_OPTION_ID (14004): + Custom field option ID for Creative with custom field ID + equal to the ID in index 4 of + ``ReportDefinition.creative_custom_field_ids``. 
+ CREATIVE_CUSTOM_FIELD_5_OPTION_ID (14005): + Custom field option ID for Creative with custom field ID + equal to the ID in index 5 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_6_OPTION_ID (14006): + Custom field option ID for Creative with custom field ID + equal to the ID in index 6 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_7_OPTION_ID (14007): + Custom field option ID for Creative with custom field ID + equal to the ID in index 7 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_8_OPTION_ID (14008): + Custom field option ID for Creative with custom field ID + equal to the ID in index 8 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_9_OPTION_ID (14009): + Custom field option ID for Creative with custom field ID + equal to the ID in index 9 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_10_OPTION_ID (14010): + Custom field option ID for Creative with custom field ID + equal to the ID in index 10 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_11_OPTION_ID (14011): + Custom field option ID for Creative with custom field ID + equal to the ID in index 11 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_12_OPTION_ID (14012): + Custom field option ID for Creative with custom field ID + equal to the ID in index 12 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_13_OPTION_ID (14013): + Custom field option ID for Creative with custom field ID + equal to the ID in index 13 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_14_OPTION_ID (14014): + Custom field option ID for Creative with custom field ID + equal to the ID in index 14 of + ``ReportDefinition.creative_custom_field_ids``. 
+ CREATIVE_CUSTOM_FIELD_0_VALUE (15000): + Custom field value for Creative with custom field ID equal + to the ID in index 0 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_1_VALUE (15001): + Custom field value for Creative with custom field ID equal + to the ID in index 1 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_2_VALUE (15002): + Custom field value for Creative with custom field ID equal + to the ID in index 2 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_3_VALUE (15003): + Custom field value for Creative with custom field ID equal + to the ID in index 3 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_4_VALUE (15004): + Custom field value for Creative with custom field ID equal + to the ID in index 4 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_5_VALUE (15005): + Custom field value for Creative with custom field ID equal + to the ID in index 5 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_6_VALUE (15006): + Custom field value for Creative with custom field ID equal + to the ID in index 6 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_7_VALUE (15007): + Custom field value for Creative with custom field ID equal + to the ID in index 7 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_8_VALUE (15008): + Custom field value for Creative with custom field ID equal + to the ID in index 8 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_9_VALUE (15009): + Custom field value for Creative with custom field ID equal + to the ID in index 9 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_10_VALUE (15010): + Custom field value for Creative with custom field ID equal + to the ID in index 10 of + ``ReportDefinition.creative_custom_field_ids``. 
+ CREATIVE_CUSTOM_FIELD_11_VALUE (15011): + Custom field value for Creative with custom field ID equal + to the ID in index 11 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_12_VALUE (15012): + Custom field value for Creative with custom field ID equal + to the ID in index 12 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_13_VALUE (15013): + Custom field value for Creative with custom field ID equal + to the ID in index 13 of + ``ReportDefinition.creative_custom_field_ids``. + CREATIVE_CUSTOM_FIELD_14_VALUE (15014): + Custom field value for Creative with custom field ID equal + to the ID in index 14 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_0_OPTION_ID (16000): + Custom field option ID for Backfill line item with custom + field ID equal to the ID in index 0 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_1_OPTION_ID (16001): + Custom field option ID for Backfill line item with custom + field ID equal to the ID in index 1 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_2_OPTION_ID (16002): + Custom field option ID for Backfill line item with custom + field ID equal to the ID in index 2 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_3_OPTION_ID (16003): + Custom field option ID for Backfill line item with custom + field ID equal to the ID in index 3 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_4_OPTION_ID (16004): + Custom field option ID for Backfill line item with custom + field ID equal to the ID in index 4 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_5_OPTION_ID (16005): + Custom field option ID for Backfill line item with custom + field ID equal to the ID in index 5 of + ``ReportDefinition.line_item_custom_field_ids``. 
+ BACKFILL_LINE_ITEM_CUSTOM_FIELD_6_OPTION_ID (16006): + Custom field option ID for Backfill line item with custom + field ID equal to the ID in index 6 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_7_OPTION_ID (16007): + Custom field option ID for Backfill line item with custom + field ID equal to the ID in index 7 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_8_OPTION_ID (16008): + Custom field option ID for Backfill line item with custom + field ID equal to the ID in index 8 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_9_OPTION_ID (16009): + Custom field option ID for Backfill line item with custom + field ID equal to the ID in index 9 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_10_OPTION_ID (16010): + Custom field option ID for Backfill line item with custom + field ID equal to the ID in index 10 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_11_OPTION_ID (16011): + Custom field option ID for Backfill line item with custom + field ID equal to the ID in index 11 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_12_OPTION_ID (16012): + Custom field option ID for Backfill line item with custom + field ID equal to the ID in index 12 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_13_OPTION_ID (16013): + Custom field option ID for Backfill line item with custom + field ID equal to the ID in index 13 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_14_OPTION_ID (16014): + Custom field option ID for Backfill line item with custom + field ID equal to the ID in index 14 of + ``ReportDefinition.line_item_custom_field_ids``. 
+ BACKFILL_LINE_ITEM_CUSTOM_FIELD_0_VALUE (17000): + Custom field value for Backfill line item with custom field + ID equal to the ID in index 0 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_1_VALUE (17001): + Custom field value for Backfill line item with custom field + ID equal to the ID in index 1 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_2_VALUE (17002): + Custom field value for Backfill line item with custom field + ID equal to the ID in index 2 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_3_VALUE (17003): + Custom field value for Backfill line item with custom field + ID equal to the ID in index 3 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_4_VALUE (17004): + Custom field value for Backfill line item with custom field + ID equal to the ID in index 4 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_5_VALUE (17005): + Custom field value for Backfill line item with custom field + ID equal to the ID in index 5 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_6_VALUE (17006): + Custom field value for Backfill line item with custom field + ID equal to the ID in index 6 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_7_VALUE (17007): + Custom field value for Backfill line item with custom field + ID equal to the ID in index 7 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_8_VALUE (17008): + Custom field value for Backfill line item with custom field + ID equal to the ID in index 8 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_9_VALUE (17009): + Custom field value for Backfill line item with custom field + ID equal to the ID in index 9 of + ``ReportDefinition.line_item_custom_field_ids``. 
+ BACKFILL_LINE_ITEM_CUSTOM_FIELD_10_VALUE (17010): + Custom field value for Backfill line item with custom field + ID equal to the ID in index 10 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_11_VALUE (17011): + Custom field value for Backfill line item with custom field + ID equal to the ID in index 11 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_12_VALUE (17012): + Custom field value for Backfill line item with custom field + ID equal to the ID in index 12 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_13_VALUE (17013): + Custom field value for Backfill line item with custom field + ID equal to the ID in index 13 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_LINE_ITEM_CUSTOM_FIELD_14_VALUE (17014): + Custom field value for Backfill line item with custom field + ID equal to the ID in index 14 of + ``ReportDefinition.line_item_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_0_OPTION_ID (18000): + Custom field option ID for Backfill order with custom field + ID equal to the ID in index 0 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_1_OPTION_ID (18001): + Custom field option ID for Backfill order with custom field + ID equal to the ID in index 1 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_2_OPTION_ID (18002): + Custom field option ID for Backfill order with custom field + ID equal to the ID in index 2 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_3_OPTION_ID (18003): + Custom field option ID for Backfill order with custom field + ID equal to the ID in index 3 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_4_OPTION_ID (18004): + Custom field option ID for Backfill order with custom field + ID equal to the ID in index 4 of + ``ReportDefinition.order_custom_field_ids``. 
+ BACKFILL_ORDER_CUSTOM_FIELD_5_OPTION_ID (18005): + Custom field option ID for Backfill order with custom field + ID equal to the ID in index 5 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_6_OPTION_ID (18006): + Custom field option ID for Backfill order with custom field + ID equal to the ID in index 6 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_7_OPTION_ID (18007): + Custom field option ID for Backfill order with custom field + ID equal to the ID in index 7 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_8_OPTION_ID (18008): + Custom field option ID for Backfill order with custom field + ID equal to the ID in index 8 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_9_OPTION_ID (18009): + Custom field option ID for Backfill order with custom field + ID equal to the ID in index 9 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_10_OPTION_ID (18010): + Custom field option ID for Backfill order with custom field + ID equal to the ID in index 10 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_11_OPTION_ID (18011): + Custom field option ID for Backfill order with custom field + ID equal to the ID in index 11 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_12_OPTION_ID (18012): + Custom field option ID for Backfill order with custom field + ID equal to the ID in index 12 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_13_OPTION_ID (18013): + Custom field option ID for Backfill order with custom field + ID equal to the ID in index 13 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_14_OPTION_ID (18014): + Custom field option ID for Backfill order with custom field + ID equal to the ID in index 14 of + ``ReportDefinition.order_custom_field_ids``. 
+ BACKFILL_ORDER_CUSTOM_FIELD_0_VALUE (19000): + Custom field value for Backfill order with custom field ID + equal to the ID in index 0 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_1_VALUE (19001): + Custom field value for Backfill order with custom field ID + equal to the ID in index 1 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_2_VALUE (19002): + Custom field value for Backfill order with custom field ID + equal to the ID in index 2 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_3_VALUE (19003): + Custom field value for Backfill order with custom field ID + equal to the ID in index 3 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_4_VALUE (19004): + Custom field value for Backfill order with custom field ID + equal to the ID in index 4 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_5_VALUE (19005): + Custom field value for Backfill order with custom field ID + equal to the ID in index 5 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_6_VALUE (19006): + Custom field value for Backfill order with custom field ID + equal to the ID in index 6 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_7_VALUE (19007): + Custom field value for Backfill order with custom field ID + equal to the ID in index 7 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_8_VALUE (19008): + Custom field value for Backfill order with custom field ID + equal to the ID in index 8 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_9_VALUE (19009): + Custom field value for Backfill order with custom field ID + equal to the ID in index 9 of + ``ReportDefinition.order_custom_field_ids``. 
+ BACKFILL_ORDER_CUSTOM_FIELD_10_VALUE (19010): + Custom field value for Backfill order with custom field ID + equal to the ID in index 10 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_11_VALUE (19011): + Custom field value for Backfill order with custom field ID + equal to the ID in index 11 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_12_VALUE (19012): + Custom field value for Backfill order with custom field ID + equal to the ID in index 12 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_13_VALUE (19013): + Custom field value for Backfill order with custom field ID + equal to the ID in index 13 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_ORDER_CUSTOM_FIELD_14_VALUE (19014): + Custom field value for Backfill order with custom field ID + equal to the ID in index 14 of + ``ReportDefinition.order_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_0_OPTION_ID (20000): + Custom field option ID for Backfill creative with custom + field ID equal to the ID in index 0 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_1_OPTION_ID (20001): + Custom field option ID for Backfill creative with custom + field ID equal to the ID in index 1 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_2_OPTION_ID (20002): + Custom field option ID for Backfill creative with custom + field ID equal to the ID in index 2 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_3_OPTION_ID (20003): + Custom field option ID for Backfill creative with custom + field ID equal to the ID in index 3 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_4_OPTION_ID (20004): + Custom field option ID for Backfill creative with custom + field ID equal to the ID in index 4 of + ``ReportDefinition.creative_custom_field_ids``. 
+ BACKFILL_CREATIVE_CUSTOM_FIELD_5_OPTION_ID (20005): + Custom field option ID for Backfill creative with custom + field ID equal to the ID in index 5 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_6_OPTION_ID (20006): + Custom field option ID for Backfill creative with custom + field ID equal to the ID in index 6 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_7_OPTION_ID (20007): + Custom field option ID for Backfill creative with custom + field ID equal to the ID in index 7 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_8_OPTION_ID (20008): + Custom field option ID for Backfill creative with custom + field ID equal to the ID in index 8 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_9_OPTION_ID (20009): + Custom field option ID for Backfill creative with custom + field ID equal to the ID in index 9 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_10_OPTION_ID (20010): + Custom field option ID for Backfill creative with custom + field ID equal to the ID in index 10 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_11_OPTION_ID (20011): + Custom field option ID for Backfill creative with custom + field ID equal to the ID in index 11 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_12_OPTION_ID (20012): + Custom field option ID for Backfill creative with custom + field ID equal to the ID in index 12 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_13_OPTION_ID (20013): + Custom field option ID for Backfill creative with custom + field ID equal to the ID in index 13 of + ``ReportDefinition.creative_custom_field_ids``. 
+ BACKFILL_CREATIVE_CUSTOM_FIELD_14_OPTION_ID (20014): + Custom field option ID for Backfill creative with custom + field ID equal to the ID in index 14 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_0_VALUE (21000): + Custom field value for Backfill creative with custom field + ID equal to the ID in index 0 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_1_VALUE (21001): + Custom field value for Backfill creative with custom field + ID equal to the ID in index 1 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_2_VALUE (21002): + Custom field value for Backfill creative with custom field + ID equal to the ID in index 2 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_3_VALUE (21003): + Custom field value for Backfill creative with custom field + ID equal to the ID in index 3 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_4_VALUE (21004): + Custom field value for Backfill creative with custom field + ID equal to the ID in index 4 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_5_VALUE (21005): + Custom field value for Backfill creative with custom field + ID equal to the ID in index 5 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_6_VALUE (21006): + Custom field value for Backfill creative with custom field + ID equal to the ID in index 6 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_7_VALUE (21007): + Custom field value for Backfill creative with custom field + ID equal to the ID in index 7 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_8_VALUE (21008): + Custom field value for Backfill creative with custom field + ID equal to the ID in index 8 of + ``ReportDefinition.creative_custom_field_ids``. 
+ BACKFILL_CREATIVE_CUSTOM_FIELD_9_VALUE (21009): + Custom field value for Backfill creative with custom field + ID equal to the ID in index 9 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_10_VALUE (21010): + Custom field value for Backfill creative with custom field + ID equal to the ID in index 10 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_11_VALUE (21011): + Custom field value for Backfill creative with custom field + ID equal to the ID in index 11 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_12_VALUE (21012): + Custom field value for Backfill creative with custom field + ID equal to the ID in index 12 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_13_VALUE (21013): + Custom field value for Backfill creative with custom field + ID equal to the ID in index 13 of + ``ReportDefinition.creative_custom_field_ids``. + BACKFILL_CREATIVE_CUSTOM_FIELD_14_VALUE (21014): + Custom field value for Backfill creative with custom field + ID equal to the ID in index 14 of + ``ReportDefinition.creative_custom_field_ids``. + CUSTOM_DIMENSION_0_VALUE_ID (100000): + Custom Dimension Value ID for Custom Dimension with key + equal to the key in index 0 of + ``ReportDefinition.custom_dimension_key_ids``. + CUSTOM_DIMENSION_1_VALUE_ID (100001): + Custom Dimension Value ID for Custom Dimension with key + equal to the key in index 1 of + ``ReportDefinition.custom_dimension_key_ids``. + CUSTOM_DIMENSION_2_VALUE_ID (100002): + Custom Dimension Value ID for Custom Dimension with key + equal to the key in index 2 of + ``ReportDefinition.custom_dimension_key_ids``. + CUSTOM_DIMENSION_3_VALUE_ID (100003): + Custom Dimension Value ID for Custom Dimension with key + equal to the key in index 3 of + ``ReportDefinition.custom_dimension_key_ids``. 
+ CUSTOM_DIMENSION_4_VALUE_ID (100004): + Custom Dimension Value ID for Custom Dimension with key + equal to the key in index 4 of + ``ReportDefinition.custom_dimension_key_ids``. + CUSTOM_DIMENSION_5_VALUE_ID (100005): + Custom Dimension Value ID for Custom Dimension with key + equal to the key in index 5 of + ``ReportDefinition.custom_dimension_key_ids``. + CUSTOM_DIMENSION_6_VALUE_ID (100006): + Custom Dimension Value ID for Custom Dimension with key + equal to the key in index 6 of + ``ReportDefinition.custom_dimension_key_ids``. + CUSTOM_DIMENSION_7_VALUE_ID (100007): + Custom Dimension Value ID for Custom Dimension with key + equal to the key in index 7 of + ``ReportDefinition.custom_dimension_key_ids``. + CUSTOM_DIMENSION_8_VALUE_ID (100008): + Custom Dimension Value ID for Custom Dimension with key + equal to the key in index 8 of + ``ReportDefinition.custom_dimension_key_ids``. + CUSTOM_DIMENSION_9_VALUE_ID (100009): + Custom Dimension Value ID for Custom Dimension with key + equal to the key in index 9 of + ``ReportDefinition.custom_dimension_key_ids``. + CUSTOM_DIMENSION_0_VALUE (101000): + Custom Dimension Value name for Custom Dimension with key + equal to the id in index 0 of + ``ReportDefinition.custom_dimension_key_ids``. + CUSTOM_DIMENSION_1_VALUE (101001): + Custom Dimension Value name for Custom Dimension with key + equal to the id in index 1 of + ``ReportDefinition.custom_dimension_key_ids``. + CUSTOM_DIMENSION_2_VALUE (101002): + Custom Dimension Value name for Custom Dimension with key + equal to the id in index 2 of + ``ReportDefinition.custom_dimension_key_ids``. + CUSTOM_DIMENSION_3_VALUE (101003): + Custom Dimension Value name for Custom Dimension with key + equal to the id in index 3 of + ``ReportDefinition.custom_dimension_key_ids``. + CUSTOM_DIMENSION_4_VALUE (101004): + Custom Dimension Value name for Custom Dimension with key + equal to the id in index 4 of + ``ReportDefinition.custom_dimension_key_ids``. 
+ CUSTOM_DIMENSION_5_VALUE (101005): + Custom Dimension Value name for Custom Dimension with key + equal to the id in index 5 of + ``ReportDefinition.custom_dimension_key_ids``. + CUSTOM_DIMENSION_6_VALUE (101006): + Custom Dimension Value name for Custom Dimension with key + equal to the id in index 6 of + ``ReportDefinition.custom_dimension_key_ids``. + CUSTOM_DIMENSION_7_VALUE (101007): + Custom Dimension Value name for Custom Dimension with key + equal to the id in index 7 of + ``ReportDefinition.custom_dimension_key_ids``. + CUSTOM_DIMENSION_8_VALUE (101008): + Custom Dimension Value name for Custom Dimension with key + equal to the id in index 8 of + ``ReportDefinition.custom_dimension_key_ids``. + CUSTOM_DIMENSION_9_VALUE (101009): + Custom Dimension Value name for Custom Dimension with key + equal to the id in index 9 of + ``ReportDefinition.custom_dimension_key_ids``. + """ + DIMENSION_UNSPECIFIED = 0 + ADVERTISER_DOMAIN_NAME = 242 + ADVERTISER_EXTERNAL_ID = 228 + ADVERTISER_ID = 131 + ADVERTISER_LABELS = 230 + ADVERTISER_LABEL_IDS = 229 + ADVERTISER_NAME = 132 + ADVERTISER_PRIMARY_CONTACT = 227 + AD_LOCATION = 390 + AD_LOCATION_NAME = 391 + AD_UNIT_CODE = 64 + AD_UNIT_CODE_LEVEL_1 = 65 + AD_UNIT_CODE_LEVEL_10 = 74 + AD_UNIT_CODE_LEVEL_11 = 75 + AD_UNIT_CODE_LEVEL_12 = 76 + AD_UNIT_CODE_LEVEL_13 = 77 + AD_UNIT_CODE_LEVEL_14 = 78 + AD_UNIT_CODE_LEVEL_15 = 79 + AD_UNIT_CODE_LEVEL_16 = 80 + AD_UNIT_CODE_LEVEL_2 = 66 + AD_UNIT_CODE_LEVEL_3 = 67 + AD_UNIT_CODE_LEVEL_4 = 68 + AD_UNIT_CODE_LEVEL_5 = 69 + AD_UNIT_CODE_LEVEL_6 = 70 + AD_UNIT_CODE_LEVEL_7 = 71 + AD_UNIT_CODE_LEVEL_8 = 72 + AD_UNIT_CODE_LEVEL_9 = 73 + AD_UNIT_DEPTH = 101 + AD_UNIT_ID = 25 + AD_UNIT_ID_ALL_LEVEL = 27 + AD_UNIT_ID_LEVEL_1 = 30 + AD_UNIT_ID_LEVEL_10 = 48 + AD_UNIT_ID_LEVEL_11 = 50 + AD_UNIT_ID_LEVEL_12 = 52 + AD_UNIT_ID_LEVEL_13 = 54 + AD_UNIT_ID_LEVEL_14 = 56 + AD_UNIT_ID_LEVEL_15 = 58 + AD_UNIT_ID_LEVEL_16 = 60 + AD_UNIT_ID_LEVEL_2 = 32 + AD_UNIT_ID_LEVEL_3 = 34 + AD_UNIT_ID_LEVEL_4 = 
36 + AD_UNIT_ID_LEVEL_5 = 38 + AD_UNIT_ID_LEVEL_6 = 40 + AD_UNIT_ID_LEVEL_7 = 42 + AD_UNIT_ID_LEVEL_8 = 44 + AD_UNIT_ID_LEVEL_9 = 46 + AD_UNIT_ID_TOP_LEVEL = 142 + AD_UNIT_NAME = 26 + AD_UNIT_NAME_ALL_LEVEL = 29 + AD_UNIT_NAME_LEVEL_1 = 31 + AD_UNIT_NAME_LEVEL_10 = 49 + AD_UNIT_NAME_LEVEL_11 = 51 + AD_UNIT_NAME_LEVEL_12 = 53 + AD_UNIT_NAME_LEVEL_13 = 55 + AD_UNIT_NAME_LEVEL_14 = 57 + AD_UNIT_NAME_LEVEL_15 = 59 + AD_UNIT_NAME_LEVEL_16 = 61 + AD_UNIT_NAME_LEVEL_2 = 33 + AD_UNIT_NAME_LEVEL_3 = 35 + AD_UNIT_NAME_LEVEL_4 = 37 + AD_UNIT_NAME_LEVEL_5 = 39 + AD_UNIT_NAME_LEVEL_6 = 41 + AD_UNIT_NAME_LEVEL_7 = 43 + AD_UNIT_NAME_LEVEL_8 = 45 + AD_UNIT_NAME_LEVEL_9 = 47 + AD_UNIT_NAME_TOP_LEVEL = 143 + AD_UNIT_REWARD_AMOUNT = 63 + AD_UNIT_REWARD_TYPE = 62 + AD_UNIT_STATUS = 206 + AD_UNIT_STATUS_NAME = 207 + APP_VERSION = 392 + BACKFILL_ADVERTISER_EXTERNAL_ID = 349 + BACKFILL_ADVERTISER_ID = 346 + BACKFILL_ADVERTISER_LABELS = 351 + BACKFILL_ADVERTISER_LABEL_IDS = 350 + BACKFILL_ADVERTISER_NAME = 347 + BACKFILL_ADVERTISER_PRIMARY_CONTACT = 348 + BACKFILL_CREATIVE_BILLING_TYPE = 378 + BACKFILL_CREATIVE_BILLING_TYPE_NAME = 379 + BACKFILL_CREATIVE_CLICK_THROUGH_URL = 376 + BACKFILL_CREATIVE_ID = 370 + BACKFILL_CREATIVE_NAME = 371 + BACKFILL_CREATIVE_THIRD_PARTY_VENDOR = 377 + BACKFILL_CREATIVE_TYPE = 374 + BACKFILL_CREATIVE_TYPE_NAME = 375 + BACKFILL_LINE_ITEM_ARCHIVED = 278 + BACKFILL_LINE_ITEM_COMPANION_DELIVERY_OPTION = 258 + BACKFILL_LINE_ITEM_COMPANION_DELIVERY_OPTION_NAME = 259 + BACKFILL_LINE_ITEM_COMPUTED_STATUS = 296 + BACKFILL_LINE_ITEM_COMPUTED_STATUS_NAME = 297 + BACKFILL_LINE_ITEM_CONTRACTED_QUANTITY = 280 + BACKFILL_LINE_ITEM_COST_PER_UNIT = 272 + BACKFILL_LINE_ITEM_COST_TYPE = 264 + BACKFILL_LINE_ITEM_COST_TYPE_NAME = 265 + BACKFILL_LINE_ITEM_CREATIVE_END_DATE = 381 + BACKFILL_LINE_ITEM_CREATIVE_ROTATION_TYPE = 290 + BACKFILL_LINE_ITEM_CREATIVE_ROTATION_TYPE_NAME = 291 + BACKFILL_LINE_ITEM_CREATIVE_START_DATE = 380 + BACKFILL_LINE_ITEM_CURRENCY_CODE = 288 + 
BACKFILL_LINE_ITEM_DELIVERY_INDICATOR = 274 + BACKFILL_LINE_ITEM_DELIVERY_RATE_TYPE = 292 + BACKFILL_LINE_ITEM_DELIVERY_RATE_TYPE_NAME = 293 + BACKFILL_LINE_ITEM_DISCOUNT_ABSOLUTE = 294 + BACKFILL_LINE_ITEM_DISCOUNT_PERCENTAGE = 295 + BACKFILL_LINE_ITEM_END_DATE = 267 + BACKFILL_LINE_ITEM_END_DATE_TIME = 269 + BACKFILL_LINE_ITEM_ENVIRONMENT_TYPE = 302 + BACKFILL_LINE_ITEM_ENVIRONMENT_TYPE_NAME = 257 + BACKFILL_LINE_ITEM_EXTERNAL_DEAL_ID = 285 + BACKFILL_LINE_ITEM_EXTERNAL_ID = 273 + BACKFILL_LINE_ITEM_FREQUENCY_CAP = 303 + BACKFILL_LINE_ITEM_ID = 298 + BACKFILL_LINE_ITEM_LAST_MODIFIED_BY_APP = 289 + BACKFILL_LINE_ITEM_LIFETIME_CLICKS = 283 + BACKFILL_LINE_ITEM_LIFETIME_IMPRESSIONS = 282 + BACKFILL_LINE_ITEM_LIFETIME_VIEWABLE_IMPRESSIONS = 284 + BACKFILL_LINE_ITEM_MAKEGOOD = 276 + BACKFILL_LINE_ITEM_NAME = 299 + BACKFILL_LINE_ITEM_NON_CPD_BOOKED_REVENUE = 286 + BACKFILL_LINE_ITEM_OPTIMIZABLE = 277 + BACKFILL_LINE_ITEM_PRIMARY_GOAL_TYPE = 262 + BACKFILL_LINE_ITEM_PRIMARY_GOAL_TYPE_NAME = 263 + BACKFILL_LINE_ITEM_PRIMARY_GOAL_UNIT_TYPE = 260 + BACKFILL_LINE_ITEM_PRIMARY_GOAL_UNIT_TYPE_NAME = 261 + BACKFILL_LINE_ITEM_PRIORITY = 266 + BACKFILL_LINE_ITEM_RESERVATION_STATUS = 306 + BACKFILL_LINE_ITEM_RESERVATION_STATUS_NAME = 307 + BACKFILL_LINE_ITEM_START_DATE = 268 + BACKFILL_LINE_ITEM_START_DATE_TIME = 270 + BACKFILL_LINE_ITEM_TYPE = 300 + BACKFILL_LINE_ITEM_TYPE_NAME = 301 + BACKFILL_LINE_ITEM_UNLIMITED_END = 271 + BACKFILL_LINE_ITEM_VALUE_COST_PER_UNIT = 275 + BACKFILL_LINE_ITEM_WEB_PROPERTY_CODE = 287 + BACKFILL_MASTER_COMPANION_CREATIVE_ID = 372 + BACKFILL_MASTER_COMPANION_CREATIVE_NAME = 373 + BACKFILL_ORDER_AGENCY = 313 + BACKFILL_ORDER_AGENCY_ID = 314 + BACKFILL_ORDER_BOOKED_CPC = 315 + BACKFILL_ORDER_BOOKED_CPM = 316 + BACKFILL_ORDER_DELIVERY_STATUS = 340 + BACKFILL_ORDER_DELIVERY_STATUS_NAME = 341 + BACKFILL_ORDER_END_DATE = 317 + BACKFILL_ORDER_END_DATE_TIME = 319 + BACKFILL_ORDER_EXTERNAL_ID = 320 + BACKFILL_ORDER_ID = 338 + BACKFILL_ORDER_LABELS = 334 + 
BACKFILL_ORDER_LABEL_IDS = 335 + BACKFILL_ORDER_LIFETIME_CLICKS = 322 + BACKFILL_ORDER_LIFETIME_IMPRESSIONS = 323 + BACKFILL_ORDER_NAME = 339 + BACKFILL_ORDER_PO_NUMBER = 324 + BACKFILL_ORDER_PROGRAMMATIC = 321 + BACKFILL_ORDER_SALESPERSON = 325 + BACKFILL_ORDER_SECONDARY_SALESPEOPLE = 329 + BACKFILL_ORDER_SECONDARY_SALESPEOPLE_ID = 328 + BACKFILL_ORDER_SECONDARY_TRAFFICKERS = 331 + BACKFILL_ORDER_SECONDARY_TRAFFICKERS_ID = 330 + BACKFILL_ORDER_START_DATE = 332 + BACKFILL_ORDER_START_DATE_TIME = 333 + BACKFILL_ORDER_TRAFFICKER = 326 + BACKFILL_ORDER_TRAFFICKER_ID = 327 + BACKFILL_ORDER_UNLIMITED_END = 318 + BACKFILL_PROGRAMMATIC_BUYER_ID = 336 + BACKFILL_PROGRAMMATIC_BUYER_NAME = 337 + BRANDING_TYPE = 383 + BRANDING_TYPE_NAME = 384 + BROWSER_CATEGORY = 119 + BROWSER_CATEGORY_NAME = 120 + BROWSER_ID = 235 + BROWSER_NAME = 236 + CARRIER_ID = 369 + CARRIER_NAME = 368 + CLASSIFIED_ADVERTISER_ID = 133 + CLASSIFIED_ADVERTISER_NAME = 134 + CLASSIFIED_BRAND_ID = 243 + CLASSIFIED_BRAND_NAME = 244 + CONTENT_ID = 246 + CONTENT_NAME = 247 + COUNTRY_ID = 11 + COUNTRY_NAME = 12 + CREATIVE_BILLING_TYPE = 366 + CREATIVE_BILLING_TYPE_NAME = 367 + CREATIVE_CLICK_THROUGH_URL = 174 + CREATIVE_ID = 138 + CREATIVE_NAME = 139 + CREATIVE_TECHNOLOGY = 148 + CREATIVE_TECHNOLOGY_NAME = 149 + CREATIVE_THIRD_PARTY_VENDOR = 361 + CREATIVE_TYPE = 344 + CREATIVE_TYPE_NAME = 345 + DATE = 3 + DAY_OF_WEEK = 4 + DEMAND_CHANNEL = 9 + DEMAND_CHANNEL_NAME = 10 + DEMAND_SUBCHANNEL = 22 + DEMAND_SUBCHANNEL_NAME = 23 + DEVICE = 226 + DEVICE_CATEGORY = 15 + DEVICE_CATEGORY_NAME = 16 + DEVICE_NAME = 225 + EXCHANGE_THIRD_PARTY_COMPANY_ID = 185 + EXCHANGE_THIRD_PARTY_COMPANY_NAME = 186 + FIRST_LOOK_PRICING_RULE_ID = 248 + FIRST_LOOK_PRICING_RULE_NAME = 249 + HOUR = 100 + INTERACTION_TYPE = 223 + INTERACTION_TYPE_NAME = 224 + INVENTORY_FORMAT = 17 + INVENTORY_FORMAT_NAME = 18 + INVENTORY_TYPE = 19 + INVENTORY_TYPE_NAME = 20 + IS_ADX_DIRECT = 382 + IS_FIRST_LOOK_DEAL = 401 + KEY_VALUES_ID = 214 + KEY_VALUES_NAME 
= 215 + LINE_ITEM_ARCHIVED = 188 + LINE_ITEM_COMPANION_DELIVERY_OPTION = 204 + LINE_ITEM_COMPANION_DELIVERY_OPTION_NAME = 205 + LINE_ITEM_COMPUTED_STATUS = 250 + LINE_ITEM_COMPUTED_STATUS_NAME = 251 + LINE_ITEM_CONTRACTED_QUANTITY = 92 + LINE_ITEM_COST_PER_UNIT = 85 + LINE_ITEM_COST_TYPE = 212 + LINE_ITEM_COST_TYPE_NAME = 213 + LINE_ITEM_CREATIVE_END_DATE = 176 + LINE_ITEM_CREATIVE_ROTATION_TYPE = 189 + LINE_ITEM_CREATIVE_ROTATION_TYPE_NAME = 190 + LINE_ITEM_CREATIVE_START_DATE = 175 + LINE_ITEM_CURRENCY_CODE = 180 + LINE_ITEM_DELIVERY_INDICATOR = 87 + LINE_ITEM_DELIVERY_RATE_TYPE = 191 + LINE_ITEM_DELIVERY_RATE_TYPE_NAME = 192 + LINE_ITEM_DISCOUNT_ABSOLUTE = 195 + LINE_ITEM_DISCOUNT_PERCENTAGE = 196 + LINE_ITEM_END_DATE = 81 + LINE_ITEM_END_DATE_TIME = 83 + LINE_ITEM_ENVIRONMENT_TYPE = 201 + LINE_ITEM_ENVIRONMENT_TYPE_NAME = 202 + LINE_ITEM_EXTERNAL_DEAL_ID = 97 + LINE_ITEM_EXTERNAL_ID = 86 + LINE_ITEM_FREQUENCY_CAP = 256 + LINE_ITEM_ID = 1 + LINE_ITEM_LAST_MODIFIED_BY_APP = 181 + LINE_ITEM_LIFETIME_CLICKS = 95 + LINE_ITEM_LIFETIME_IMPRESSIONS = 94 + LINE_ITEM_LIFETIME_VIEWABLE_IMPRESSIONS = 96 + LINE_ITEM_MAKEGOOD = 89 + LINE_ITEM_NAME = 2 + LINE_ITEM_NON_CPD_BOOKED_REVENUE = 98 + LINE_ITEM_OPTIMIZABLE = 90 + LINE_ITEM_PRIMARY_GOAL_TYPE = 210 + LINE_ITEM_PRIMARY_GOAL_TYPE_NAME = 211 + LINE_ITEM_PRIMARY_GOAL_UNITS_ABSOLUTE = 93 + LINE_ITEM_PRIMARY_GOAL_UNITS_PERCENTAGE = 396 + LINE_ITEM_PRIMARY_GOAL_UNIT_TYPE = 208 + LINE_ITEM_PRIMARY_GOAL_UNIT_TYPE_NAME = 209 + LINE_ITEM_PRIORITY = 24 + LINE_ITEM_RESERVATION_STATUS = 304 + LINE_ITEM_RESERVATION_STATUS_NAME = 305 + LINE_ITEM_START_DATE = 82 + LINE_ITEM_START_DATE_TIME = 84 + LINE_ITEM_TYPE = 193 + LINE_ITEM_TYPE_NAME = 194 + LINE_ITEM_UNLIMITED_END = 187 + LINE_ITEM_VALUE_COST_PER_UNIT = 88 + LINE_ITEM_WEB_PROPERTY_CODE = 179 + MASTER_COMPANION_CREATIVE_ID = 140 + MASTER_COMPANION_CREATIVE_NAME = 141 + MOBILE_APP_FREE = 128 + MOBILE_APP_ICON_URL = 129 + MOBILE_APP_ID = 123 + MOBILE_APP_NAME = 127 + 
MOBILE_APP_OWNERSHIP_STATUS = 311 + MOBILE_APP_OWNERSHIP_STATUS_NAME = 312 + MOBILE_APP_STORE = 125 + MOBILE_APP_STORE_NAME = 245 + MOBILE_INVENTORY_TYPE = 99 + MOBILE_INVENTORY_TYPE_NAME = 21 + MOBILE_SDK_VERSION_NAME = 130 + MONTH_YEAR = 6 + NATIVE_AD_FORMAT_ID = 255 + NATIVE_AD_FORMAT_NAME = 254 + NATIVE_STYLE_ID = 253 + NATIVE_STYLE_NAME = 252 + OPERATING_SYSTEM_CATEGORY = 117 + OPERATING_SYSTEM_CATEGORY_NAME = 118 + OPERATING_SYSTEM_VERSION_ID = 238 + OPERATING_SYSTEM_VERSION_NAME = 237 + ORDER_AGENCY = 150 + ORDER_AGENCY_ID = 151 + ORDER_BOOKED_CPC = 152 + ORDER_BOOKED_CPM = 153 + ORDER_DELIVERY_STATUS = 231 + ORDER_DELIVERY_STATUS_NAME = 239 + ORDER_END_DATE = 154 + ORDER_END_DATE_TIME = 155 + ORDER_EXTERNAL_ID = 156 + ORDER_ID = 7 + ORDER_LABELS = 170 + ORDER_LABEL_IDS = 171 + ORDER_LIFETIME_CLICKS = 158 + ORDER_LIFETIME_IMPRESSIONS = 159 + ORDER_NAME = 8 + ORDER_PO_NUMBER = 160 + ORDER_PROGRAMMATIC = 157 + ORDER_SALESPERSON = 161 + ORDER_SECONDARY_SALESPEOPLE = 164 + ORDER_SECONDARY_SALESPEOPLE_ID = 165 + ORDER_SECONDARY_TRAFFICKERS = 166 + ORDER_SECONDARY_TRAFFICKERS_ID = 167 + ORDER_START_DATE = 168 + ORDER_START_DATE_TIME = 169 + ORDER_TRAFFICKER = 162 + ORDER_TRAFFICKER_ID = 163 + ORDER_UNLIMITED_END = 203 + PLACEMENT_ID = 113 + PLACEMENT_ID_ALL = 144 + PLACEMENT_NAME = 114 + PLACEMENT_NAME_ALL = 145 + PLACEMENT_STATUS = 362 + PLACEMENT_STATUS_ALL = 363 + PLACEMENT_STATUS_NAME = 364 + PLACEMENT_STATUS_NAME_ALL = 365 + PROGRAMMATIC_BUYER_ID = 240 + PROGRAMMATIC_BUYER_NAME = 241 + PROGRAMMATIC_CHANNEL = 13 + PROGRAMMATIC_CHANNEL_NAME = 14 + RENDERED_CREATIVE_SIZE = 343 + REQUESTED_AD_SIZES = 352 + REQUEST_TYPE = 146 + REQUEST_TYPE_NAME = 147 + SITE = 387 + TARGETING_ID = 232 + TARGETING_NAME = 233 + TARGETING_TYPE = 385 + TARGETING_TYPE_NAME = 386 + TRAFFIC_SOURCE = 388 + TRAFFIC_SOURCE_NAME = 389 + UNIFIED_PRICING_RULE_ID = 393 + UNIFIED_PRICING_RULE_NAME = 394 + VIDEO_PLCMT = 172 + VIDEO_PLCMT_NAME = 173 + WEEK = 5 + YIELD_GROUP_BUYER_NAME = 184 + 
YIELD_GROUP_ID = 182 + YIELD_GROUP_NAME = 183 + LINE_ITEM_CUSTOM_FIELD_0_OPTION_ID = 10000 + LINE_ITEM_CUSTOM_FIELD_1_OPTION_ID = 10001 + LINE_ITEM_CUSTOM_FIELD_2_OPTION_ID = 10002 + LINE_ITEM_CUSTOM_FIELD_3_OPTION_ID = 10003 + LINE_ITEM_CUSTOM_FIELD_4_OPTION_ID = 10004 + LINE_ITEM_CUSTOM_FIELD_5_OPTION_ID = 10005 + LINE_ITEM_CUSTOM_FIELD_6_OPTION_ID = 10006 + LINE_ITEM_CUSTOM_FIELD_7_OPTION_ID = 10007 + LINE_ITEM_CUSTOM_FIELD_8_OPTION_ID = 10008 + LINE_ITEM_CUSTOM_FIELD_9_OPTION_ID = 10009 + LINE_ITEM_CUSTOM_FIELD_10_OPTION_ID = 10010 + LINE_ITEM_CUSTOM_FIELD_11_OPTION_ID = 10011 + LINE_ITEM_CUSTOM_FIELD_12_OPTION_ID = 10012 + LINE_ITEM_CUSTOM_FIELD_13_OPTION_ID = 10013 + LINE_ITEM_CUSTOM_FIELD_14_OPTION_ID = 10014 + LINE_ITEM_CUSTOM_FIELD_0_VALUE = 11000 + LINE_ITEM_CUSTOM_FIELD_1_VALUE = 11001 + LINE_ITEM_CUSTOM_FIELD_2_VALUE = 11002 + LINE_ITEM_CUSTOM_FIELD_3_VALUE = 11003 + LINE_ITEM_CUSTOM_FIELD_4_VALUE = 11004 + LINE_ITEM_CUSTOM_FIELD_5_VALUE = 11005 + LINE_ITEM_CUSTOM_FIELD_6_VALUE = 11006 + LINE_ITEM_CUSTOM_FIELD_7_VALUE = 11007 + LINE_ITEM_CUSTOM_FIELD_8_VALUE = 11008 + LINE_ITEM_CUSTOM_FIELD_9_VALUE = 11009 + LINE_ITEM_CUSTOM_FIELD_10_VALUE = 11010 + LINE_ITEM_CUSTOM_FIELD_11_VALUE = 11011 + LINE_ITEM_CUSTOM_FIELD_12_VALUE = 11012 + LINE_ITEM_CUSTOM_FIELD_13_VALUE = 11013 + LINE_ITEM_CUSTOM_FIELD_14_VALUE = 11014 + ORDER_CUSTOM_FIELD_0_OPTION_ID = 12000 + ORDER_CUSTOM_FIELD_1_OPTION_ID = 12001 + ORDER_CUSTOM_FIELD_2_OPTION_ID = 12002 + ORDER_CUSTOM_FIELD_3_OPTION_ID = 12003 + ORDER_CUSTOM_FIELD_4_OPTION_ID = 12004 + ORDER_CUSTOM_FIELD_5_OPTION_ID = 12005 + ORDER_CUSTOM_FIELD_6_OPTION_ID = 12006 + ORDER_CUSTOM_FIELD_7_OPTION_ID = 12007 + ORDER_CUSTOM_FIELD_8_OPTION_ID = 12008 + ORDER_CUSTOM_FIELD_9_OPTION_ID = 12009 + ORDER_CUSTOM_FIELD_10_OPTION_ID = 12010 + ORDER_CUSTOM_FIELD_11_OPTION_ID = 12011 + ORDER_CUSTOM_FIELD_12_OPTION_ID = 12012 + ORDER_CUSTOM_FIELD_13_OPTION_ID = 12013 + ORDER_CUSTOM_FIELD_14_OPTION_ID = 12014 + ORDER_CUSTOM_FIELD_0_VALUE = 
13000 + ORDER_CUSTOM_FIELD_1_VALUE = 13001 + ORDER_CUSTOM_FIELD_2_VALUE = 13002 + ORDER_CUSTOM_FIELD_3_VALUE = 13003 + ORDER_CUSTOM_FIELD_4_VALUE = 13004 + ORDER_CUSTOM_FIELD_5_VALUE = 13005 + ORDER_CUSTOM_FIELD_6_VALUE = 13006 + ORDER_CUSTOM_FIELD_7_VALUE = 13007 + ORDER_CUSTOM_FIELD_8_VALUE = 13008 + ORDER_CUSTOM_FIELD_9_VALUE = 13009 + ORDER_CUSTOM_FIELD_10_VALUE = 13010 + ORDER_CUSTOM_FIELD_11_VALUE = 13011 + ORDER_CUSTOM_FIELD_12_VALUE = 13012 + ORDER_CUSTOM_FIELD_13_VALUE = 13013 + ORDER_CUSTOM_FIELD_14_VALUE = 13014 + CREATIVE_CUSTOM_FIELD_0_OPTION_ID = 14000 + CREATIVE_CUSTOM_FIELD_1_OPTION_ID = 14001 + CREATIVE_CUSTOM_FIELD_2_OPTION_ID = 14002 + CREATIVE_CUSTOM_FIELD_3_OPTION_ID = 14003 + CREATIVE_CUSTOM_FIELD_4_OPTION_ID = 14004 + CREATIVE_CUSTOM_FIELD_5_OPTION_ID = 14005 + CREATIVE_CUSTOM_FIELD_6_OPTION_ID = 14006 + CREATIVE_CUSTOM_FIELD_7_OPTION_ID = 14007 + CREATIVE_CUSTOM_FIELD_8_OPTION_ID = 14008 + CREATIVE_CUSTOM_FIELD_9_OPTION_ID = 14009 + CREATIVE_CUSTOM_FIELD_10_OPTION_ID = 14010 + CREATIVE_CUSTOM_FIELD_11_OPTION_ID = 14011 + CREATIVE_CUSTOM_FIELD_12_OPTION_ID = 14012 + CREATIVE_CUSTOM_FIELD_13_OPTION_ID = 14013 + CREATIVE_CUSTOM_FIELD_14_OPTION_ID = 14014 + CREATIVE_CUSTOM_FIELD_0_VALUE = 15000 + CREATIVE_CUSTOM_FIELD_1_VALUE = 15001 + CREATIVE_CUSTOM_FIELD_2_VALUE = 15002 + CREATIVE_CUSTOM_FIELD_3_VALUE = 15003 + CREATIVE_CUSTOM_FIELD_4_VALUE = 15004 + CREATIVE_CUSTOM_FIELD_5_VALUE = 15005 + CREATIVE_CUSTOM_FIELD_6_VALUE = 15006 + CREATIVE_CUSTOM_FIELD_7_VALUE = 15007 + CREATIVE_CUSTOM_FIELD_8_VALUE = 15008 + CREATIVE_CUSTOM_FIELD_9_VALUE = 15009 + CREATIVE_CUSTOM_FIELD_10_VALUE = 15010 + CREATIVE_CUSTOM_FIELD_11_VALUE = 15011 + CREATIVE_CUSTOM_FIELD_12_VALUE = 15012 + CREATIVE_CUSTOM_FIELD_13_VALUE = 15013 + CREATIVE_CUSTOM_FIELD_14_VALUE = 15014 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_0_OPTION_ID = 16000 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_1_OPTION_ID = 16001 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_2_OPTION_ID = 16002 + 
BACKFILL_LINE_ITEM_CUSTOM_FIELD_3_OPTION_ID = 16003 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_4_OPTION_ID = 16004 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_5_OPTION_ID = 16005 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_6_OPTION_ID = 16006 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_7_OPTION_ID = 16007 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_8_OPTION_ID = 16008 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_9_OPTION_ID = 16009 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_10_OPTION_ID = 16010 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_11_OPTION_ID = 16011 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_12_OPTION_ID = 16012 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_13_OPTION_ID = 16013 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_14_OPTION_ID = 16014 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_0_VALUE = 17000 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_1_VALUE = 17001 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_2_VALUE = 17002 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_3_VALUE = 17003 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_4_VALUE = 17004 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_5_VALUE = 17005 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_6_VALUE = 17006 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_7_VALUE = 17007 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_8_VALUE = 17008 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_9_VALUE = 17009 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_10_VALUE = 17010 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_11_VALUE = 17011 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_12_VALUE = 17012 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_13_VALUE = 17013 + BACKFILL_LINE_ITEM_CUSTOM_FIELD_14_VALUE = 17014 + BACKFILL_ORDER_CUSTOM_FIELD_0_OPTION_ID = 18000 + BACKFILL_ORDER_CUSTOM_FIELD_1_OPTION_ID = 18001 + BACKFILL_ORDER_CUSTOM_FIELD_2_OPTION_ID = 18002 + BACKFILL_ORDER_CUSTOM_FIELD_3_OPTION_ID = 18003 + BACKFILL_ORDER_CUSTOM_FIELD_4_OPTION_ID = 18004 + BACKFILL_ORDER_CUSTOM_FIELD_5_OPTION_ID = 18005 + BACKFILL_ORDER_CUSTOM_FIELD_6_OPTION_ID = 18006 + BACKFILL_ORDER_CUSTOM_FIELD_7_OPTION_ID = 18007 + BACKFILL_ORDER_CUSTOM_FIELD_8_OPTION_ID = 18008 + BACKFILL_ORDER_CUSTOM_FIELD_9_OPTION_ID = 18009 + BACKFILL_ORDER_CUSTOM_FIELD_10_OPTION_ID = 18010 + BACKFILL_ORDER_CUSTOM_FIELD_11_OPTION_ID 
= 18011 + BACKFILL_ORDER_CUSTOM_FIELD_12_OPTION_ID = 18012 + BACKFILL_ORDER_CUSTOM_FIELD_13_OPTION_ID = 18013 + BACKFILL_ORDER_CUSTOM_FIELD_14_OPTION_ID = 18014 + BACKFILL_ORDER_CUSTOM_FIELD_0_VALUE = 19000 + BACKFILL_ORDER_CUSTOM_FIELD_1_VALUE = 19001 + BACKFILL_ORDER_CUSTOM_FIELD_2_VALUE = 19002 + BACKFILL_ORDER_CUSTOM_FIELD_3_VALUE = 19003 + BACKFILL_ORDER_CUSTOM_FIELD_4_VALUE = 19004 + BACKFILL_ORDER_CUSTOM_FIELD_5_VALUE = 19005 + BACKFILL_ORDER_CUSTOM_FIELD_6_VALUE = 19006 + BACKFILL_ORDER_CUSTOM_FIELD_7_VALUE = 19007 + BACKFILL_ORDER_CUSTOM_FIELD_8_VALUE = 19008 + BACKFILL_ORDER_CUSTOM_FIELD_9_VALUE = 19009 + BACKFILL_ORDER_CUSTOM_FIELD_10_VALUE = 19010 + BACKFILL_ORDER_CUSTOM_FIELD_11_VALUE = 19011 + BACKFILL_ORDER_CUSTOM_FIELD_12_VALUE = 19012 + BACKFILL_ORDER_CUSTOM_FIELD_13_VALUE = 19013 + BACKFILL_ORDER_CUSTOM_FIELD_14_VALUE = 19014 + BACKFILL_CREATIVE_CUSTOM_FIELD_0_OPTION_ID = 20000 + BACKFILL_CREATIVE_CUSTOM_FIELD_1_OPTION_ID = 20001 + BACKFILL_CREATIVE_CUSTOM_FIELD_2_OPTION_ID = 20002 + BACKFILL_CREATIVE_CUSTOM_FIELD_3_OPTION_ID = 20003 + BACKFILL_CREATIVE_CUSTOM_FIELD_4_OPTION_ID = 20004 + BACKFILL_CREATIVE_CUSTOM_FIELD_5_OPTION_ID = 20005 + BACKFILL_CREATIVE_CUSTOM_FIELD_6_OPTION_ID = 20006 + BACKFILL_CREATIVE_CUSTOM_FIELD_7_OPTION_ID = 20007 + BACKFILL_CREATIVE_CUSTOM_FIELD_8_OPTION_ID = 20008 + BACKFILL_CREATIVE_CUSTOM_FIELD_9_OPTION_ID = 20009 + BACKFILL_CREATIVE_CUSTOM_FIELD_10_OPTION_ID = 20010 + BACKFILL_CREATIVE_CUSTOM_FIELD_11_OPTION_ID = 20011 + BACKFILL_CREATIVE_CUSTOM_FIELD_12_OPTION_ID = 20012 + BACKFILL_CREATIVE_CUSTOM_FIELD_13_OPTION_ID = 20013 + BACKFILL_CREATIVE_CUSTOM_FIELD_14_OPTION_ID = 20014 + BACKFILL_CREATIVE_CUSTOM_FIELD_0_VALUE = 21000 + BACKFILL_CREATIVE_CUSTOM_FIELD_1_VALUE = 21001 + BACKFILL_CREATIVE_CUSTOM_FIELD_2_VALUE = 21002 + BACKFILL_CREATIVE_CUSTOM_FIELD_3_VALUE = 21003 + BACKFILL_CREATIVE_CUSTOM_FIELD_4_VALUE = 21004 + BACKFILL_CREATIVE_CUSTOM_FIELD_5_VALUE = 21005 + BACKFILL_CREATIVE_CUSTOM_FIELD_6_VALUE = 21006 
+ BACKFILL_CREATIVE_CUSTOM_FIELD_7_VALUE = 21007 + BACKFILL_CREATIVE_CUSTOM_FIELD_8_VALUE = 21008 + BACKFILL_CREATIVE_CUSTOM_FIELD_9_VALUE = 21009 + BACKFILL_CREATIVE_CUSTOM_FIELD_10_VALUE = 21010 + BACKFILL_CREATIVE_CUSTOM_FIELD_11_VALUE = 21011 + BACKFILL_CREATIVE_CUSTOM_FIELD_12_VALUE = 21012 + BACKFILL_CREATIVE_CUSTOM_FIELD_13_VALUE = 21013 + BACKFILL_CREATIVE_CUSTOM_FIELD_14_VALUE = 21014 + CUSTOM_DIMENSION_0_VALUE_ID = 100000 + CUSTOM_DIMENSION_1_VALUE_ID = 100001 + CUSTOM_DIMENSION_2_VALUE_ID = 100002 + CUSTOM_DIMENSION_3_VALUE_ID = 100003 + CUSTOM_DIMENSION_4_VALUE_ID = 100004 + CUSTOM_DIMENSION_5_VALUE_ID = 100005 + CUSTOM_DIMENSION_6_VALUE_ID = 100006 + CUSTOM_DIMENSION_7_VALUE_ID = 100007 + CUSTOM_DIMENSION_8_VALUE_ID = 100008 + CUSTOM_DIMENSION_9_VALUE_ID = 100009 + CUSTOM_DIMENSION_0_VALUE = 101000 + CUSTOM_DIMENSION_1_VALUE = 101001 + CUSTOM_DIMENSION_2_VALUE = 101002 + CUSTOM_DIMENSION_3_VALUE = 101003 + CUSTOM_DIMENSION_4_VALUE = 101004 + CUSTOM_DIMENSION_5_VALUE = 101005 + CUSTOM_DIMENSION_6_VALUE = 101006 + CUSTOM_DIMENSION_7_VALUE = 101007 + CUSTOM_DIMENSION_8_VALUE = 101008 + CUSTOM_DIMENSION_9_VALUE = 101009 + + class Metric(proto.Enum): + r"""Reporting metrics. + + Values: + METRIC_UNSPECIFIED (0): + Default value. This value is unused. + ACTIVE_VIEW_AVERAGE_VIEWABLE_TIME (61): + Active View total average time in seconds + that specific impressions are reported as being + viewable. + ACTIVE_VIEW_ELIGIBLE_IMPRESSIONS (58): + Total number of impressions that were + eligible to measure viewability. + ACTIVE_VIEW_MEASURABLE_IMPRESSIONS (57): + The total number of impressions that were + sampled and measured by active view. + ACTIVE_VIEW_MEASURABLE_IMPRESSIONS_RATE (60): + The percentage of total impressions that were + measurable by active view (out of all the total + impressions sampled for active view). + ACTIVE_VIEW_VIEWABLE_IMPRESSIONS (56): + The total number of impressions viewed on the + user's screen. 
+ ACTIVE_VIEW_VIEWABLE_IMPRESSIONS_RATE (59): + The percentage of total impressions viewed on + the user's screen (out of the total impressions + measurable by active view). + ADSENSE_ACTIVE_VIEW_AVERAGE_VIEWABLE_TIME (73): + Active View AdSense average time in seconds + that specific impressions are reported as being + viewable. + ADSENSE_ACTIVE_VIEW_ELIGIBLE_IMPRESSIONS (70): + Total number of impressions delivered by + AdSense that were eligible to measure + viewability. + ADSENSE_ACTIVE_VIEW_MEASURABLE_IMPRESSIONS (69): + The number of impressions delivered by + AdSense that were sampled, and measurable by + active view. + ADSENSE_ACTIVE_VIEW_MEASURABLE_IMPRESSIONS_RATE (72): + The percentage of impressions delivered by + AdSense that were measurable by active view (out + of all AdSense impressions sampled for active + view). + ADSENSE_ACTIVE_VIEW_VIEWABLE_IMPRESSIONS (68): + The number of impressions delivered by + AdSense viewed on the user's screen. + ADSENSE_ACTIVE_VIEW_VIEWABLE_IMPRESSIONS_RATE (71): + The percentage of impressions delivered by + AdSense viewed on the user's screen (out of + AdSense impressions measurable by active view). + ADSENSE_AVERAGE_ECPM (26): + The average effective + cost-per-thousand-impressions earned from the + ads delivered by AdSense through line item + dynamic allocation. + ADSENSE_CLICKS (23): + Number of clicks delivered by AdSense demand + channel. + ADSENSE_CTR (24): + The ratio of impressions served by AdSense + that resulted in users clicking on an ad. The + clickthrough rate (CTR) is updated nightly. The + AdSense CTR is calculated as: (AdSense clicks / + AdSense impressions). + ADSENSE_IMPRESSIONS (22): + Total impressions delivered by AdSense. + ADSENSE_PERCENT_CLICKS (28): + Ratio of clicks delivered by AdSense through + line item dynamic allocation in relation to the + total clicks delivered. 
+ ADSENSE_PERCENT_IMPRESSIONS (27): + Ratio of impressions delivered by AdSense + through line item dynamic allocation in relation + to the total impressions delivered. + ADSENSE_PERCENT_REVENUE (29): + Ratio of revenue generated by AdSense through + line item dynamic allocation in relation to the + total revenue. + ADSENSE_PERCENT_REVENUE_WITHOUT_CPD (30): + Ratio of revenue generated by AdSense through + line item dynamic allocation in relation to the + total revenue (excluding CPD). + ADSENSE_RESPONSES_SERVED (41): + The total number of times that an AdSense ad + is delivered. + ADSENSE_REVENUE (25): + Revenue generated from AdSense through line + item dynamic allocation, calculated in the + network's currency and time zone. + AD_EXCHANGE_ACTIVE_VIEW_AVERAGE_VIEWABLE_TIME (79): + Active View AdExchange average time in + seconds that specific impressions are reported + as being viewable. + AD_EXCHANGE_ACTIVE_VIEW_ELIGIBLE_IMPRESSIONS (76): + Total number of impressions delivered by Ad + Exchange that were eligible to measure + viewability. + AD_EXCHANGE_ACTIVE_VIEW_MEASURABLE_IMPRESSIONS (75): + The number of impressions delivered by Ad + Exchange that were sampled, and measurable by + active view. + AD_EXCHANGE_ACTIVE_VIEW_MEASURABLE_IMPRESSIONS_RATE (78): + The percentage of impressions delivered by Ad + Exchange that were measurable by active view + (out of all Ad Exchange impressions sampled for + active view). + AD_EXCHANGE_ACTIVE_VIEW_VIEWABLE_IMPRESSIONS (74): + The number of impressions delivered by Ad + Exchange viewed on the user's screen. + AD_EXCHANGE_ACTIVE_VIEW_VIEWABLE_IMPRESSIONS_RATE (77): + The percentage of impressions delivered by Ad + Exchange viewed on the user's screen (out of Ad + Exchange impressions measurable by active view). + AD_EXCHANGE_AVERAGE_ECPM (18): + The average effective + cost-per-thousand-impressions earned from the + ads delivered by Ad Exchange through line item + dynamic allocation. 
+ AD_EXCHANGE_CLICKS (15): + Number of clicks delivered by the Ad + Exchange. + AD_EXCHANGE_CTR (16): + The ratio of impressions served by the Ad + Exchange that resulted in users clicking on an + ad. The clickthrough rate (CTR) is updated + nightly. Ad Exchange CTR is calculated as: (Ad + Exchange clicks / Ad Exchange impressions). + AD_EXCHANGE_IMPRESSIONS (14): + Total impressions delivered by the Ad + Exchange. + AD_EXCHANGE_PERCENT_CLICKS (20): + Ratio of clicks delivered by Ad Exchange + through line item dynamic allocation in relation + to the total clicks delivered. + AD_EXCHANGE_PERCENT_IMPRESSIONS (19): + Ratio of impressions delivered by Ad Exchange + through line item dynamic allocation in relation + to the total impressions delivered. + AD_EXCHANGE_PERCENT_REVENUE (21): + Ratio of revenue generated by Ad Exchange + through line item dynamic allocation in relation + to the total revenue. + AD_EXCHANGE_PERCENT_REVENUE_WITHOUT_CPD (31): + Ratio of revenue generated by Ad Exchange + through line item dynamic allocation in relation + to the total revenue (excluding CPD). + AD_EXCHANGE_RESPONSES_SERVED (42): + The total number of times that an Ad Exchange + ad is delivered. + AD_EXCHANGE_REVENUE (17): + Revenue generated from the Ad Exchange + through line item dynamic allocation, calculated + in your network's currency and time zone. + AD_REQUESTS (38): + The total number of times that an ad request + is sent to the ad server including dynamic + allocation. + AD_SERVER_ACTIVE_VIEW_AVERAGE_VIEWABLE_TIME (67): + Active View ad server average time in seconds + that specific impressions are reported as being + viewable. + AD_SERVER_ACTIVE_VIEW_ELIGIBLE_IMPRESSIONS (64): + Total number of impressions delivered by the + ad server that were eligible to measure + viewability. + AD_SERVER_ACTIVE_VIEW_MEASURABLE_IMPRESSIONS (63): + The number of impressions delivered by the ad + server that were sampled, and measurable by + active view. 
+ AD_SERVER_ACTIVE_VIEW_MEASURABLE_IMPRESSIONS_RATE (66): + The percentage of impressions delivered by + the ad server that were measurable by active + view (out of all the ad server impressions + sampled for active view). + AD_SERVER_ACTIVE_VIEW_VIEWABLE_IMPRESSIONS (62): + The number of impressions delivered by the ad + server viewed on the user's screen. + AD_SERVER_ACTIVE_VIEW_VIEWABLE_IMPRESSIONS_RATE (65): + The percentage of impressions delivered by + the ad server viewed on the user's screen (out + of the ad server impressions measurable by + active view). + AD_SERVER_AVERAGE_ECPM (34): + Average effective + cost-per-thousand-impressions earned from the + ads delivered by the Google Ad Manager server. + AD_SERVER_AVERAGE_ECPM_WITHOUT_CPD (10): + Average effective + cost-per-thousand-impressions earned from the + ads delivered by the Google Ad Manager server, + excluding CPD value. + AD_SERVER_CLICKS (7): + Total clicks served by the Google Ad Manager + server. It usually takes about 30 minutes for + new clicks to be recorded and added to the total + displayed in reporting. + AD_SERVER_CPD_REVENUE (32): + CPD revenue earned, calculated in your + network's currency, for the ads delivered by the + Google Ad Manager server. Sum of all booked + revenue. + AD_SERVER_CTR (8): + Ratio of impressions served by the Google Ad + Manager server that resulted in users clicking + on an ad. The clickthrough rate (CTR) is updated + nightly. The ad server CTR is calculated as: (Ad + server clicks / Ad server impressions). + AD_SERVER_IMPRESSIONS (6): + Total impressions delivered by the Ad Server. + AD_SERVER_PERCENT_CLICKS (12): + Ratio of clicks delivered by the Google Ad + Manager server in relation to the total clicks + delivered. + AD_SERVER_PERCENT_IMPRESSIONS (11): + Ratio of impressions delivered by the Google + Ad Manager server in relation to the total + impressions delivered. 
+ AD_SERVER_PERCENT_REVENUE (35): + Ratio of revenue generated by the Google Ad + Manager server in relation to the total revenue. + AD_SERVER_PERCENT_REVENUE_WITHOUT_CPD (13): + Ratio of revenue generated by the Google Ad + Manager server (excluding CPD) in relation to + the total revenue. + AD_SERVER_RESPONSES_SERVED (40): + The total number of times that an ad is + served by the ad server. + AD_SERVER_REVENUE (33): + All CPM, CPC, and CPD revenue earned, + calculated in your network's currency, for the + ads delivered by the Google Ad Manager server. + Sum of all booked revenue. + AD_SERVER_REVENUE_WITHOUT_CPD (9): + Revenue (excluding CPD) earned, calculated in + your network's currency, for the ads delivered + by the Google Ad Manager server. Sum of all + booked revenue. + AUCTIONS_WON (80): + Number of winning bids received from Open + Bidding buyers, even when the winning bid is + placed at the end of a mediation for mobile apps + chain. + AVERAGE_ECPM (37): + eCPM averaged across the Google Ad Manager + server, AdSense, and Ad Exchange. + AVERAGE_ECPM_WITHOUT_CPD (5): + eCPM averaged across the Google Ad Manager + server (excluding CPD), AdSense, and Ad + Exchange. + BIDS (81): + Number of bids received from Open Bidding + buyers, regardless of whether the returned bid + competes in an auction. + BIDS_IN_AUCTION (82): + Number of bids received from Open Bidding + buyers that competed in the auction. + CALLOUTS (83): + Number of times a yield partner is asked to + return bid to fill a yield group request. + CLICKS (2): + The number of times a user clicked on an ad. + CODE_SERVED_COUNT (44): + The total number of times that the code for + an ad is served by the ad server including + dynamic allocation. + CTR (3): + For standard ads, your ad clickthrough rate + (CTR) is the number of ad clicks divided by the + number of individual ad impressions expressed as + a fraction. Ad CTR = Clicks / Ad impressions. 
+ GOOGLE_SOLD_AUCTION_COVIEWED_IMPRESSIONS (129): + The number of coviewed impressions sold by + Google in partner sales. + GOOGLE_SOLD_AUCTION_IMPRESSIONS (128): + The number of auction impressions sold by + Google in partner sales. + GOOGLE_SOLD_COVIEWED_IMPRESSIONS (131): + The number of coviewed impressions sold by + Google in partner sales. + GOOGLE_SOLD_IMPRESSIONS (130): + The number of impressions sold by Google in + partner sales. + GOOGLE_SOLD_RESERVATION_COVIEWED_IMPRESSIONS (127): + The number of coviewed impressions sold by + Google in partner sales. + GOOGLE_SOLD_RESERVATION_IMPRESSIONS (126): + The number of reservation impressions sold by + Google in partner sales. + IMPRESSIONS (1): + Total impressions from the Google Ad Manager + server, AdSense, Ad Exchange, and yield group + partners. + PARTNER_SALES_FILLED_POD_REQUESTS (135): + The number of filled pod requests (filled by + partner or Google) in partner sales. + PARTNER_SALES_FILL_RATE (136): + The percent of filled requests to total ad + requests in partner sales. + PARTNER_SALES_PARTNER_MATCH_RATE (137): + The percent of partner filled requests to + total ad requests in partner sales. + PARTNER_SALES_QUERIES (132): + The number of queries eligible for partner + sales. + PARTNER_SALES_UNFILLED_IMPRESSIONS (133): + The number of partner unfilled impressions in + partner sales. If a pod request is not filled by + partner but filled by Google, this metric will + still count 1. + PARTNER_SALES_UNMATCHED_QUERIES (134): + The number of partner unmatched queries in + partner sales. If an ad request is not filled by + partner but filled by Google, this metric will + still count 1. + PARTNER_SOLD_CODE_SERVED (125): + The number of code served sold by partner in + partner sales. + PARTNER_SOLD_COVIEWED_IMPRESSIONS (124): + The number of coviewed impressions sold by + partner in partner sales. + PARTNER_SOLD_IMPRESSIONS (123): + The number of impressions sold by partner in + partner sales. 
+ PROGRAMMATIC_ELIGIBLE_AD_REQUESTS (177): + The total number of ad requests eligible for + programmatic inventory, including Programmatic + Guaranteed, Preferred Deals, backfill, and open + auction. + PROGRAMMATIC_MATCH_RATE (178): + The number of programmatic responses served + divided by the number of programmatic eligible + ad requests. Includes Ad Exchange, Open Bidding, + and Preferred Deals. + PROGRAMMATIC_RESPONSES_SERVED (176): + Total number of ad responses served from programmatic demand + sources. Includes Ad Exchange, Open Bidding, and Preferred + Deals. + + Differs from AD_EXCHANGE_RESPONSES_SERVED, which doesn't + include Open Bidding ad requests. + RESPONSES_SERVED (39): + The total number of times that an ad is + served by the ad server including dynamic + allocation. + REVENUE (36): + Total amount of CPM, CPC, and CPD revenue + based on the number of units served by the + Google Ad Manager server, AdSense, Ad Exchange, + and third-party Mediation networks. + REVENUE_WITHOUT_CPD (4): + Total amount of revenue (excluding CPD) based + on the number of units served by the Google Ad + Manager server, AdSense, Ad Exchange, and + third-party Mediation networks. + SUCCESSFUL_RESPONSES (84): + Number of times a yield group buyer + successfully returned a bid in response to a + yield group callout. + UNFILLED_IMPRESSIONS (45): + The total number of missed impressions due to + the ad servers' inability to find ads to serve + including dynamic allocation. + UNMATCHED_AD_REQUESTS (43): + The total number of times that an ad is not + returned by the ad server. + USER_MESSAGES_OFFERWALL_MESSAGES_SHOWN (121): + Number of times an Offerwall message was + shown to users. + USER_MESSAGES_OFFERWALL_SUCCESSFUL_ENGAGEMENTS (122): + The number of messages where the user gained + an entitlement. + VIDEO_INTERACTION_AVERAGE_INTERACTION_RATE (92): + The number of user interactions with a video, + on average, such as pause, full screen, mute, + etc. 
+ VIDEO_INTERACTION_COLLAPSES (93): + The number of times a user collapses a video, + either to its original size or to a different + size. + VIDEO_INTERACTION_EXPANDS (95): + The number of times a user expands a video. + VIDEO_INTERACTION_FULL_SCREENS (96): + The number of times ad clip played in full + screen mode. + VIDEO_INTERACTION_MUTES (97): + The number of times video player was in mute + state during play of ad clip. + VIDEO_INTERACTION_PAUSES (98): + The number of times user paused ad clip. + VIDEO_INTERACTION_RESUMES (99): + The number of times the user unpaused the + video. + VIDEO_INTERACTION_REWINDS (100): + The number of times a user rewinds the video. + VIDEO_INTERACTION_UNMUTES (101): + The number of times a user unmutes the video. + VIDEO_INTERACTION_VIDEO_SKIPS (102): + The number of times a skippable video is + skipped. + VIDEO_REAL_TIME_CREATIVE_SERVES (139): + The number of total creative serves in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_100_COUNT (143): + The number of errors of type 100 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_101_COUNT (144): + The number of errors of type 101 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_102_COUNT (145): + The number of errors of type 102 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_200_COUNT (146): + The number of errors of type 200 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_201_COUNT (147): + The number of errors of type 201 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_202_COUNT (148): + The number of errors of type 202 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_203_COUNT (149): + The number of errors of type 203 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_300_COUNT (150): + The number of errors of type 300 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_301_COUNT (151): + The number of errors of type 301 in video + realtime reporting. 
+ VIDEO_REAL_TIME_ERROR_302_COUNT (152): + The number of errors of type 302 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_303_COUNT (153): + The number of errors of type 303 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_400_COUNT (154): + The number of errors of type 400 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_401_COUNT (155): + The number of errors of type 401 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_402_COUNT (156): + The number of errors of type 402 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_403_COUNT (157): + The number of errors of type 403 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_405_COUNT (158): + The number of errors of type 405 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_406_COUNT (159): + The number of errors of type 406 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_407_COUNT (160): + The number of errors of type 407 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_408_COUNT (161): + The number of errors of type 408 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_409_COUNT (162): + The number of errors of type 409 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_410_COUNT (163): + The number of errors of type 410 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_500_COUNT (164): + The number of errors of type 500 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_501_COUNT (165): + The number of errors of type 501 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_502_COUNT (166): + The number of errors of type 502 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_503_COUNT (167): + The number of errors of type 503 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_600_COUNT (168): + The number of errors of type 600 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_601_COUNT (169): + The number of errors of type 601 in video + realtime reporting. 
+ VIDEO_REAL_TIME_ERROR_602_COUNT (170): + The number of errors of type 602 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_603_COUNT (171): + The number of errors of type 603 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_604_COUNT (172): + The number of errors of type 604 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_900_COUNT (173): + The number of errors of type 900 in video + realtime reporting. + VIDEO_REAL_TIME_ERROR_901_COUNT (174): + The number of errors of type 901 in video + realtime reporting. + VIDEO_REAL_TIME_IMPRESSIONS (138): + The number of total impressions in video + realtime reporting. + VIDEO_REAL_TIME_MATCHED_QUERIES (140): + The number of matched queries in video + realtime reporting. + VIDEO_REAL_TIME_TOTAL_ERROR_COUNT (175): + The number of all errors in video realtime + reporting. + VIDEO_REAL_TIME_TOTAL_QUERIES (142): + The number of total queries in video realtime + reporting. + VIDEO_REAL_TIME_UNMATCHED_QUERIES (141): + The number of unmatched queries in video + realtime reporting. + VIDEO_VIEWERSHIP_AUTO_PLAYS (103): + Number of times that the publisher specified + a video ad played automatically. + VIDEO_VIEWERSHIP_AVERAGE_VIEW_RATE (104): + Average percentage of the video watched by + users. + VIDEO_VIEWERSHIP_AVERAGE_VIEW_TIME (105): + Average time(seconds) users watched the + video. + VIDEO_VIEWERSHIP_CLICK_TO_PLAYS (106): + Number of times that the publisher specified + a video ad was clicked to play. + VIDEO_VIEWERSHIP_COMPLETES (107): + The number of times the video played to + completion. + VIDEO_VIEWERSHIP_COMPLETION_RATE (108): + Percentage of times the video played to the + end. + VIDEO_VIEWERSHIP_ENGAGED_VIEWS (109): + The number of engaged views: ad is viewed to + completion or for 30s, whichever comes first. + VIDEO_VIEWERSHIP_FIRST_QUARTILES (110): + The number of times the video played to 25% + of its length. 
+ VIDEO_VIEWERSHIP_MIDPOINTS (111): + The number of times the video reached its + midpoint during play. + VIDEO_VIEWERSHIP_SKIP_BUTTONS_SHOWN (112): + The number of times a skip button is shown in + video. + VIDEO_VIEWERSHIP_STARTS (113): + The number of impressions where the video was + played. + VIDEO_VIEWERSHIP_THIRD_QUARTILES (114): + The number of times the video played to 75% + of its length. + VIDEO_VIEWERSHIP_TOTAL_ERROR_COUNT (115): + The number of times an error occurred, such + as a VAST redirect error, a video playback + error, or an invalid response error. + VIDEO_VIEWERSHIP_TOTAL_ERROR_RATE (94): + The percentage of video error count. + VIDEO_VIEWERSHIP_VIDEO_LENGTH (116): + Duration of the video creative. + VIDEO_VIEWERSHIP_VIEW_THROUGH_RATE (117): + View-through rate represented as a + percentage. + YIELD_GROUP_ESTIMATED_CPM (88): + The estimated net rate for yield groups or + individual yield group partners. + YIELD_GROUP_ESTIMATED_REVENUE (87): + Total net revenue earned by a yield group, + based upon the yield group estimated CPM and + yield group impressions recorded. + YIELD_GROUP_IMPRESSIONS (85): + Number of matched yield group requests where + a yield partner delivered their ad to publisher + inventory. + YIELD_GROUP_MEDIATION_FILL_RATE (89): + Yield group Mediation fill rate indicating + how often a network fills an ad request. + YIELD_GROUP_MEDIATION_MATCHED_QUERIES (86): + Total requests where a Mediation chain was + served. + YIELD_GROUP_MEDIATION_PASSBACKS (118): + The number of mediation chain passback across + all channels. + YIELD_GROUP_MEDIATION_THIRD_PARTY_ECPM (90): + Revenue per thousand impressions based on + data collected by Ad Manager from third-party ad + network reports. 
+ """ + METRIC_UNSPECIFIED = 0 + ACTIVE_VIEW_AVERAGE_VIEWABLE_TIME = 61 + ACTIVE_VIEW_ELIGIBLE_IMPRESSIONS = 58 + ACTIVE_VIEW_MEASURABLE_IMPRESSIONS = 57 + ACTIVE_VIEW_MEASURABLE_IMPRESSIONS_RATE = 60 + ACTIVE_VIEW_VIEWABLE_IMPRESSIONS = 56 + ACTIVE_VIEW_VIEWABLE_IMPRESSIONS_RATE = 59 + ADSENSE_ACTIVE_VIEW_AVERAGE_VIEWABLE_TIME = 73 + ADSENSE_ACTIVE_VIEW_ELIGIBLE_IMPRESSIONS = 70 + ADSENSE_ACTIVE_VIEW_MEASURABLE_IMPRESSIONS = 69 + ADSENSE_ACTIVE_VIEW_MEASURABLE_IMPRESSIONS_RATE = 72 + ADSENSE_ACTIVE_VIEW_VIEWABLE_IMPRESSIONS = 68 + ADSENSE_ACTIVE_VIEW_VIEWABLE_IMPRESSIONS_RATE = 71 + ADSENSE_AVERAGE_ECPM = 26 + ADSENSE_CLICKS = 23 + ADSENSE_CTR = 24 + ADSENSE_IMPRESSIONS = 22 + ADSENSE_PERCENT_CLICKS = 28 + ADSENSE_PERCENT_IMPRESSIONS = 27 + ADSENSE_PERCENT_REVENUE = 29 + ADSENSE_PERCENT_REVENUE_WITHOUT_CPD = 30 + ADSENSE_RESPONSES_SERVED = 41 + ADSENSE_REVENUE = 25 + AD_EXCHANGE_ACTIVE_VIEW_AVERAGE_VIEWABLE_TIME = 79 + AD_EXCHANGE_ACTIVE_VIEW_ELIGIBLE_IMPRESSIONS = 76 + AD_EXCHANGE_ACTIVE_VIEW_MEASURABLE_IMPRESSIONS = 75 + AD_EXCHANGE_ACTIVE_VIEW_MEASURABLE_IMPRESSIONS_RATE = 78 + AD_EXCHANGE_ACTIVE_VIEW_VIEWABLE_IMPRESSIONS = 74 + AD_EXCHANGE_ACTIVE_VIEW_VIEWABLE_IMPRESSIONS_RATE = 77 + AD_EXCHANGE_AVERAGE_ECPM = 18 + AD_EXCHANGE_CLICKS = 15 + AD_EXCHANGE_CTR = 16 + AD_EXCHANGE_IMPRESSIONS = 14 + AD_EXCHANGE_PERCENT_CLICKS = 20 + AD_EXCHANGE_PERCENT_IMPRESSIONS = 19 + AD_EXCHANGE_PERCENT_REVENUE = 21 + AD_EXCHANGE_PERCENT_REVENUE_WITHOUT_CPD = 31 + AD_EXCHANGE_RESPONSES_SERVED = 42 + AD_EXCHANGE_REVENUE = 17 + AD_REQUESTS = 38 + AD_SERVER_ACTIVE_VIEW_AVERAGE_VIEWABLE_TIME = 67 + AD_SERVER_ACTIVE_VIEW_ELIGIBLE_IMPRESSIONS = 64 + AD_SERVER_ACTIVE_VIEW_MEASURABLE_IMPRESSIONS = 63 + AD_SERVER_ACTIVE_VIEW_MEASURABLE_IMPRESSIONS_RATE = 66 + AD_SERVER_ACTIVE_VIEW_VIEWABLE_IMPRESSIONS = 62 + AD_SERVER_ACTIVE_VIEW_VIEWABLE_IMPRESSIONS_RATE = 65 + AD_SERVER_AVERAGE_ECPM = 34 + AD_SERVER_AVERAGE_ECPM_WITHOUT_CPD = 10 + AD_SERVER_CLICKS = 7 + AD_SERVER_CPD_REVENUE = 32 + 
AD_SERVER_CTR = 8 + AD_SERVER_IMPRESSIONS = 6 + AD_SERVER_PERCENT_CLICKS = 12 + AD_SERVER_PERCENT_IMPRESSIONS = 11 + AD_SERVER_PERCENT_REVENUE = 35 + AD_SERVER_PERCENT_REVENUE_WITHOUT_CPD = 13 + AD_SERVER_RESPONSES_SERVED = 40 + AD_SERVER_REVENUE = 33 + AD_SERVER_REVENUE_WITHOUT_CPD = 9 + AUCTIONS_WON = 80 + AVERAGE_ECPM = 37 + AVERAGE_ECPM_WITHOUT_CPD = 5 + BIDS = 81 + BIDS_IN_AUCTION = 82 + CALLOUTS = 83 + CLICKS = 2 + CODE_SERVED_COUNT = 44 + CTR = 3 + GOOGLE_SOLD_AUCTION_COVIEWED_IMPRESSIONS = 129 + GOOGLE_SOLD_AUCTION_IMPRESSIONS = 128 + GOOGLE_SOLD_COVIEWED_IMPRESSIONS = 131 + GOOGLE_SOLD_IMPRESSIONS = 130 + GOOGLE_SOLD_RESERVATION_COVIEWED_IMPRESSIONS = 127 + GOOGLE_SOLD_RESERVATION_IMPRESSIONS = 126 + IMPRESSIONS = 1 + PARTNER_SALES_FILLED_POD_REQUESTS = 135 + PARTNER_SALES_FILL_RATE = 136 + PARTNER_SALES_PARTNER_MATCH_RATE = 137 + PARTNER_SALES_QUERIES = 132 + PARTNER_SALES_UNFILLED_IMPRESSIONS = 133 + PARTNER_SALES_UNMATCHED_QUERIES = 134 + PARTNER_SOLD_CODE_SERVED = 125 + PARTNER_SOLD_COVIEWED_IMPRESSIONS = 124 + PARTNER_SOLD_IMPRESSIONS = 123 + PROGRAMMATIC_ELIGIBLE_AD_REQUESTS = 177 + PROGRAMMATIC_MATCH_RATE = 178 + PROGRAMMATIC_RESPONSES_SERVED = 176 + RESPONSES_SERVED = 39 + REVENUE = 36 + REVENUE_WITHOUT_CPD = 4 + SUCCESSFUL_RESPONSES = 84 + UNFILLED_IMPRESSIONS = 45 + UNMATCHED_AD_REQUESTS = 43 + USER_MESSAGES_OFFERWALL_MESSAGES_SHOWN = 121 + USER_MESSAGES_OFFERWALL_SUCCESSFUL_ENGAGEMENTS = 122 + VIDEO_INTERACTION_AVERAGE_INTERACTION_RATE = 92 + VIDEO_INTERACTION_COLLAPSES = 93 + VIDEO_INTERACTION_EXPANDS = 95 + VIDEO_INTERACTION_FULL_SCREENS = 96 + VIDEO_INTERACTION_MUTES = 97 + VIDEO_INTERACTION_PAUSES = 98 + VIDEO_INTERACTION_RESUMES = 99 + VIDEO_INTERACTION_REWINDS = 100 + VIDEO_INTERACTION_UNMUTES = 101 + VIDEO_INTERACTION_VIDEO_SKIPS = 102 + VIDEO_REAL_TIME_CREATIVE_SERVES = 139 + VIDEO_REAL_TIME_ERROR_100_COUNT = 143 + VIDEO_REAL_TIME_ERROR_101_COUNT = 144 + VIDEO_REAL_TIME_ERROR_102_COUNT = 145 + VIDEO_REAL_TIME_ERROR_200_COUNT = 146 + 
VIDEO_REAL_TIME_ERROR_201_COUNT = 147 + VIDEO_REAL_TIME_ERROR_202_COUNT = 148 + VIDEO_REAL_TIME_ERROR_203_COUNT = 149 + VIDEO_REAL_TIME_ERROR_300_COUNT = 150 + VIDEO_REAL_TIME_ERROR_301_COUNT = 151 + VIDEO_REAL_TIME_ERROR_302_COUNT = 152 + VIDEO_REAL_TIME_ERROR_303_COUNT = 153 + VIDEO_REAL_TIME_ERROR_400_COUNT = 154 + VIDEO_REAL_TIME_ERROR_401_COUNT = 155 + VIDEO_REAL_TIME_ERROR_402_COUNT = 156 + VIDEO_REAL_TIME_ERROR_403_COUNT = 157 + VIDEO_REAL_TIME_ERROR_405_COUNT = 158 + VIDEO_REAL_TIME_ERROR_406_COUNT = 159 + VIDEO_REAL_TIME_ERROR_407_COUNT = 160 + VIDEO_REAL_TIME_ERROR_408_COUNT = 161 + VIDEO_REAL_TIME_ERROR_409_COUNT = 162 + VIDEO_REAL_TIME_ERROR_410_COUNT = 163 + VIDEO_REAL_TIME_ERROR_500_COUNT = 164 + VIDEO_REAL_TIME_ERROR_501_COUNT = 165 + VIDEO_REAL_TIME_ERROR_502_COUNT = 166 + VIDEO_REAL_TIME_ERROR_503_COUNT = 167 + VIDEO_REAL_TIME_ERROR_600_COUNT = 168 + VIDEO_REAL_TIME_ERROR_601_COUNT = 169 + VIDEO_REAL_TIME_ERROR_602_COUNT = 170 + VIDEO_REAL_TIME_ERROR_603_COUNT = 171 + VIDEO_REAL_TIME_ERROR_604_COUNT = 172 + VIDEO_REAL_TIME_ERROR_900_COUNT = 173 + VIDEO_REAL_TIME_ERROR_901_COUNT = 174 + VIDEO_REAL_TIME_IMPRESSIONS = 138 + VIDEO_REAL_TIME_MATCHED_QUERIES = 140 + VIDEO_REAL_TIME_TOTAL_ERROR_COUNT = 175 + VIDEO_REAL_TIME_TOTAL_QUERIES = 142 + VIDEO_REAL_TIME_UNMATCHED_QUERIES = 141 + VIDEO_VIEWERSHIP_AUTO_PLAYS = 103 + VIDEO_VIEWERSHIP_AVERAGE_VIEW_RATE = 104 + VIDEO_VIEWERSHIP_AVERAGE_VIEW_TIME = 105 + VIDEO_VIEWERSHIP_CLICK_TO_PLAYS = 106 + VIDEO_VIEWERSHIP_COMPLETES = 107 + VIDEO_VIEWERSHIP_COMPLETION_RATE = 108 + VIDEO_VIEWERSHIP_ENGAGED_VIEWS = 109 + VIDEO_VIEWERSHIP_FIRST_QUARTILES = 110 + VIDEO_VIEWERSHIP_MIDPOINTS = 111 + VIDEO_VIEWERSHIP_SKIP_BUTTONS_SHOWN = 112 + VIDEO_VIEWERSHIP_STARTS = 113 + VIDEO_VIEWERSHIP_THIRD_QUARTILES = 114 + VIDEO_VIEWERSHIP_TOTAL_ERROR_COUNT = 115 + VIDEO_VIEWERSHIP_TOTAL_ERROR_RATE = 94 + VIDEO_VIEWERSHIP_VIDEO_LENGTH = 116 + VIDEO_VIEWERSHIP_VIEW_THROUGH_RATE = 117 + YIELD_GROUP_ESTIMATED_CPM = 88 + 
YIELD_GROUP_ESTIMATED_REVENUE = 87 + YIELD_GROUP_IMPRESSIONS = 85 + YIELD_GROUP_MEDIATION_FILL_RATE = 89 + YIELD_GROUP_MEDIATION_MATCHED_QUERIES = 86 + YIELD_GROUP_MEDIATION_PASSBACKS = 118 + YIELD_GROUP_MEDIATION_THIRD_PARTY_ECPM = 90 + + class MetricValueType(proto.Enum): + r"""Possible metric value types to add. + + Values: + PRIMARY (0): + The values for the primary date_range. + PRIMARY_PERCENT_OF_TOTAL (1): + Each metrics' percent of the total for the primary + date_range. + COMPARISON (2): + The values for the comparison_date_range. + COMPARISON_PERCENT_OF_TOTAL (3): + Each metrics' percent of the total for the + comparison_date_range. + ABSOLUTE_CHANGE (4): + The absolute change between the primary and + comparison date ranges. + RELATIVE_CHANGE (5): + The relative change between the primary and + comparison date ranges. + """ + PRIMARY = 0 + PRIMARY_PERCENT_OF_TOTAL = 1 + COMPARISON = 2 + COMPARISON_PERCENT_OF_TOTAL = 3 + ABSOLUTE_CHANGE = 4 + RELATIVE_CHANGE = 5 + + class ReportType(proto.Enum): + r"""Supported report types. + + Values: + REPORT_TYPE_UNSPECIFIED (0): + Default value. This value is unused. + HISTORICAL (1): + Historical. + """ + REPORT_TYPE_UNSPECIFIED = 0 + HISTORICAL = 1 + + class Visibility(proto.Enum): + r"""The visibility of a report. + + Values: + HIDDEN (0): + Default value. Reports with hidden visibility + will not appear in the Ad Manager UI. + DRAFT (1): + Reports with draft visibility will appear in + the Ad Manager UI only if the user has + configured the UI to show them. + SAVED (2): + Reports with saved visibility will appear in + the Ad Manager UI by default. + """ + HIDDEN = 0 + DRAFT = 1 + SAVED = 2 + + class Value(proto.Message): + r"""Represents a single value in a report. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + int_value (int): + For integer values. + + This field is a member of `oneof`_ ``value``. + double_value (float): + For double values. + + This field is a member of `oneof`_ ``value``. + string_value (str): + For string values. + + This field is a member of `oneof`_ ``value``. + bool_value (bool): + For boolean values. + + This field is a member of `oneof`_ ``value``. + int_list_value (google.ads.admanager_v1.types.Report.Value.IntList): + For lists of integer values. + + This field is a member of `oneof`_ ``value``. + string_list_value (google.ads.admanager_v1.types.Report.Value.StringList): + For lists of string values. + + This field is a member of `oneof`_ ``value``. + bytes_value (bytes): + For bytes values. + + This field is a member of `oneof`_ ``value``. + """ + + class IntList(proto.Message): + r"""A list of integer values. + + Attributes: + values (MutableSequence[int]): + The values + """ + + values: MutableSequence[int] = proto.RepeatedField( + proto.INT64, + number=1, + ) + + class StringList(proto.Message): + r"""A list of string values. 
+ + Attributes: + values (MutableSequence[str]): + The values + """ + + values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + int_value: int = proto.Field( + proto.INT64, + number=1, + oneof="value", + ) + double_value: float = proto.Field( + proto.DOUBLE, + number=2, + oneof="value", + ) + string_value: str = proto.Field( + proto.STRING, + number=3, + oneof="value", + ) + bool_value: bool = proto.Field( + proto.BOOL, + number=4, + oneof="value", + ) + int_list_value: "Report.Value.IntList" = proto.Field( + proto.MESSAGE, + number=6, + oneof="value", + message="Report.Value.IntList", + ) + string_list_value: "Report.Value.StringList" = proto.Field( + proto.MESSAGE, + number=7, + oneof="value", + message="Report.Value.StringList", + ) + bytes_value: bytes = proto.Field( + proto.BYTES, + number=8, + oneof="value", + ) + + class Sort(proto.Message): + r"""Represents a sorting in a report. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + field (google.ads.admanager_v1.types.Report.Field): + Required. A field (dimension or metric) to + sort by. + descending (bool): + Optional. The sort order. If true the sort + will be descending. + slice_ (google.ads.admanager_v1.types.Report.Slice): + Optional. Use to sort on a specific slice of + data. + + This field is a member of `oneof`_ ``_slice``. + time_period_index (int): + Optional. When using time period columns, use + this to sort on a specific column. + + This field is a member of `oneof`_ ``_time_period_index``. + metric_value_type (google.ads.admanager_v1.types.Report.MetricValueType): + Optional. Use to specify which metric value + type to sort on. Defaults to PRIMARY. + + This field is a member of `oneof`_ ``_metric_value_type``. 
+ """ + + field: "Report.Field" = proto.Field( + proto.MESSAGE, + number=1, + message="Report.Field", + ) + descending: bool = proto.Field( + proto.BOOL, + number=2, + ) + slice_: "Report.Slice" = proto.Field( + proto.MESSAGE, + number=3, + optional=True, + message="Report.Slice", + ) + time_period_index: int = proto.Field( + proto.INT32, + number=4, + optional=True, + ) + metric_value_type: "Report.MetricValueType" = proto.Field( + proto.ENUM, + number=5, + optional=True, + enum="Report.MetricValueType", + ) + + class DataTable(proto.Message): + r"""A table containing report data including dimension and metric + values. + + """ + + class Row(proto.Message): + r"""A row of report data. + + Attributes: + dimension_values (MutableSequence[google.ads.admanager_v1.types.Report.Value]): + The order of the dimension values is the same + as the order of the dimensions specified in the + request. + metric_value_groups (MutableSequence[google.ads.admanager_v1.types.Report.DataTable.MetricValueGroup]): + The length of the metric_value_groups field will be equal to + the length of the date_ranges field in the fetch response. + The metric_value_groups field is ordered such that each + index corresponds to the date_range at the same index. For + example, given date_ranges [x, y], metric_value_groups will + have a length of two. The first entry in metric_value_groups + represents the metrics for date x and the second entry in + metric_value_groups represents the metrics for date y. 
+ """ + + dimension_values: MutableSequence["Report.Value"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Report.Value", + ) + metric_value_groups: MutableSequence[ + "Report.DataTable.MetricValueGroup" + ] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="Report.DataTable.MetricValueGroup", + ) + + class MetricValueGroup(proto.Message): + r"""Contains all metric values requested for a single date range + and set of column dimension values (returned in the columns + field of the response). The order of the metrics in each field + corresponds to the order of the metrics specified in the + request. + + Attributes: + primary_values (MutableSequence[google.ads.admanager_v1.types.Report.Value]): + Data for the PRIMARY MetricValueType. + primary_percent_of_total_values (MutableSequence[google.ads.admanager_v1.types.Report.Value]): + Data for the PRIMARY_PERCENT_OF_TOTAL MetricValueType. + comparison_values (MutableSequence[google.ads.admanager_v1.types.Report.Value]): + Data for the COMPARISON MetricValueType. + comparison_percent_of_total_values (MutableSequence[google.ads.admanager_v1.types.Report.Value]): + Data for the COMPARISON_PERCENT_OF_TOTAL MetricValueType. + absolute_change_values (MutableSequence[google.ads.admanager_v1.types.Report.Value]): + Data for the ABSOLUTE_CHANGE MetricValueType. + relative_change_values (MutableSequence[google.ads.admanager_v1.types.Report.Value]): + Data for the RELATIVE_CHANGE MetricValueType. + flag_values (MutableSequence[bool]): + If true, the flag's conditions are met. If false, the flag's + conditions are not met. flag_values has the same length as + flags and index i of flag_values represents the flag at + index i of flags. 
+ """ + + primary_values: MutableSequence["Report.Value"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Report.Value", + ) + primary_percent_of_total_values: MutableSequence[ + "Report.Value" + ] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="Report.Value", + ) + comparison_values: MutableSequence["Report.Value"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="Report.Value", + ) + comparison_percent_of_total_values: MutableSequence[ + "Report.Value" + ] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="Report.Value", + ) + absolute_change_values: MutableSequence[ + "Report.Value" + ] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message="Report.Value", + ) + relative_change_values: MutableSequence[ + "Report.Value" + ] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message="Report.Value", + ) + flag_values: MutableSequence[bool] = proto.RepeatedField( + proto.BOOL, + number=7, + ) + + class Field(proto.Message): + r"""A dimension or a metric in a report. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + dimension (google.ads.admanager_v1.types.Report.Dimension): + The dimension this field represents. + + This field is a member of `oneof`_ ``field``. + metric (google.ads.admanager_v1.types.Report.Metric): + The metric this field represents. + + This field is a member of `oneof`_ ``field``. + """ + + dimension: "Report.Dimension" = proto.Field( + proto.ENUM, + number=1, + oneof="field", + enum="Report.Dimension", + ) + metric: "Report.Metric" = proto.Field( + proto.ENUM, + number=2, + oneof="field", + enum="Report.Metric", + ) + + class Slice(proto.Message): + r"""Use to specify a slice of data. 
+ + For example, in a report, to focus on just data from the US, specify + ``COUNTRY_NAME`` for dimension and value: ``"United States"``. + + Attributes: + dimension (google.ads.admanager_v1.types.Report.Dimension): + Required. The dimension to slice on. + value (google.ads.admanager_v1.types.Report.Value): + Required. The value of the dimension. + """ + + dimension: "Report.Dimension" = proto.Field( + proto.ENUM, + number=1, + enum="Report.Dimension", + ) + value: "Report.Value" = proto.Field( + proto.MESSAGE, + number=2, + message="Report.Value", + ) + + class Filter(proto.Message): + r"""A filter over one or more fields. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + field_filter (google.ads.admanager_v1.types.Report.Filter.FieldFilter): + A filter on a single field. + + This field is a member of `oneof`_ ``type``. + not_filter (google.ads.admanager_v1.types.Report.Filter): + A filter whose result is negated. + + This field is a member of `oneof`_ ``type``. + and_filter (google.ads.admanager_v1.types.Report.Filter.FilterList): + A list of filters whose results are AND-ed. + + This field is a member of `oneof`_ ``type``. + or_filter (google.ads.admanager_v1.types.Report.Filter.FilterList): + A list of filters whose results are OR-ed. + + This field is a member of `oneof`_ ``type``. + """ + + class Operation(proto.Enum): + r"""Supported filter operations. + + Values: + IN (0): + For scalar operands, checks if the operand is + in the set of provided filter values. + + For list operands, checks if any element in the + operand is in the set of provided filter values. + + Default value. 
+ NOT_IN (1): + For scalar operands, checks that the operand + is not in the set of provided filter values. + + For list operands, checks that none of the + elements in the operand is in the set of + provided filter values. + CONTAINS (2): + For scalar string operands, checks if the + operand contains any of the provided filter + substrings. + + For string list operands, checks if any string + in the operand contains any of the provided + filter substrings. + NOT_CONTAINS (3): + For scalar string operands, checks that the + operand contains none of the provided filter + substrings. + + For string list operands, checks that none of + the strings in the operand contain none of the + provided filter substrings. + LESS_THAN (4): + Operand is less than the provided filter + value. + LESS_THAN_EQUALS (5): + Operand is less than or equal to provided + filter value. + GREATER_THAN (6): + Operand is greater than provided filter + value. + GREATER_THAN_EQUALS (7): + Operand is greater than or equal to provided + filter value. + BETWEEN (8): + Operand is between provided filter values. + MATCHES (9): + Operand matches against a regex or set of + regexes (one must match) + NOT_MATCHES (10): + Operand negative matches against a regex or + set of regexes (none must match) + """ + IN = 0 + NOT_IN = 1 + CONTAINS = 2 + NOT_CONTAINS = 3 + LESS_THAN = 4 + LESS_THAN_EQUALS = 5 + GREATER_THAN = 6 + GREATER_THAN_EQUALS = 7 + BETWEEN = 8 + MATCHES = 9 + NOT_MATCHES = 10 + + class FieldFilter(proto.Message): + r"""A filter on a specific field. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + field (google.ads.admanager_v1.types.Report.Field): + Required. The field to filter on. + operation (google.ads.admanager_v1.types.Report.Filter.Operation): + Required. The operation of this filter. + values (MutableSequence[google.ads.admanager_v1.types.Report.Value]): + Required. Values to filter to. 
+ slice_ (google.ads.admanager_v1.types.Report.Slice): + Optional. Use to filter on a specific slice + of data. + + This field is a member of `oneof`_ ``_slice``. + time_period_index (int): + Optional. When using time period columns, use + this to filter on a specific column. + + This field is a member of `oneof`_ ``_time_period_index``. + metric_value_type (google.ads.admanager_v1.types.Report.MetricValueType): + Optional. Use to specify which metric value + type to filter on. Defaults to PRIMARY. + + This field is a member of `oneof`_ ``_metric_value_type``. + """ + + field: "Report.Field" = proto.Field( + proto.MESSAGE, + number=1, + message="Report.Field", + ) + operation: "Report.Filter.Operation" = proto.Field( + proto.ENUM, + number=2, + enum="Report.Filter.Operation", + ) + values: MutableSequence["Report.Value"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="Report.Value", + ) + slice_: "Report.Slice" = proto.Field( + proto.MESSAGE, + number=4, + optional=True, + message="Report.Slice", + ) + time_period_index: int = proto.Field( + proto.INT32, + number=5, + optional=True, + ) + metric_value_type: "Report.MetricValueType" = proto.Field( + proto.ENUM, + number=6, + optional=True, + enum="Report.MetricValueType", + ) + + class FilterList(proto.Message): + r"""A list of filters. + + Attributes: + filters (MutableSequence[google.ads.admanager_v1.types.Report.Filter]): + Required. A list of filters. 
+ """ + + filters: MutableSequence["Report.Filter"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Report.Filter", + ) + + field_filter: "Report.Filter.FieldFilter" = proto.Field( + proto.MESSAGE, + number=1, + oneof="type", + message="Report.Filter.FieldFilter", + ) + not_filter: "Report.Filter" = proto.Field( + proto.MESSAGE, + number=2, + oneof="type", + message="Report.Filter", + ) + and_filter: "Report.Filter.FilterList" = proto.Field( + proto.MESSAGE, + number=3, + oneof="type", + message="Report.Filter.FilterList", + ) + or_filter: "Report.Filter.FilterList" = proto.Field( + proto.MESSAGE, + number=4, + oneof="type", + message="Report.Filter.FilterList", + ) + + class Flag(proto.Message): + r"""A flag for a report. Flags are used show if certain thresholds are + met. Result rows that match the filter will have the corresponding + [MetricValueGroup.flagValues][MetricValueGroup] index set to true. + For more information about flags see: + https://support.google.com/admanager/answer/15079975 + + Attributes: + filters (MutableSequence[google.ads.admanager_v1.types.Report.Filter]): + Required. Filters to apply for the flag. + name (str): + Optional. Name of the flag. + The flag names RED, YELLOW, GREEN, BLUE, PURPLE, + and GREY correspond to the colored flags that + appear in the UI. The UI will not display flags + with other names, but they are available for use + by API clients. + """ + + filters: MutableSequence["Report.Filter"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Report.Filter", + ) + name: str = proto.Field( + proto.STRING, + number=2, + ) + + class DateRange(proto.Message): + r"""A date range for a report. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + fixed (google.ads.admanager_v1.types.Report.DateRange.FixedDateRange): + A fixed date range. + + This field is a member of `oneof`_ ``date_range_type``. + relative (google.ads.admanager_v1.types.Report.DateRange.RelativeDateRange): + A relative date range. + + This field is a member of `oneof`_ ``date_range_type``. + """ + + class RelativeDateRange(proto.Enum): + r"""Options for relative date ranges. + + Values: + RELATIVE_DATE_RANGE_UNSPECIFIED (0): + Default value. This value is unused. + TODAY (1): + The date the report is run. + YESTERDAY (2): + The date a day before the date that the + report is run. + THIS_WEEK (3): + The full week in which this report is run. + Could include dates in the future. + THIS_WEEK_TO_DATE (29): + From the beginning of the calendar week + (Monday to Sunday) in which the up to and + including the day the report is run. + THIS_MONTH (4): + The full month in which this report is run. + Could include dates in the future. + THIS_MONTH_TO_DATE (26): + From the beginning of the calendar month in + which the report is run, to up to and including + the day the report is run. + THIS_QUARTER (5): + The full quarter in which this report is run. + Could include dates in the future. + THIS_QUARTER_TO_DATE (27): + From the beginning of the calendar quarter in + which the report is run, up to and including the + day the report is run. + THIS_YEAR (6): + The full year in which this report is run. + Could include dates in the future. + THIS_YEAR_TO_DATE (28): + From the beginning of the calendar year in + which the report is run, to up to and including + the day the report is run. + LAST_WEEK (7): + The entire previous calendar week, Monday to + Sunday (inclusive), preceding the calendar week + the report is run. + LAST_MONTH (8): + The entire previous calendar month preceding + the calendar month the report is run. 
+ LAST_QUARTER (9): + The entire previous calendar quarter + preceding the calendar quarter the report is + run. + LAST_YEAR (10): + The entire previous calendar year preceding + the calendar year the report is run. + LAST_7_DAYS (11): + The 7 days preceding the day the report is + run. + LAST_30_DAYS (12): + The 30 days preceding the day the report is + run. + LAST_60_DAYS (13): + The 60 days preceding the day the report is + run. + LAST_90_DAYS (14): + The 90 days preceding the day the report is + run. + LAST_180_DAYS (15): + The 180 days preceding the day the report is + run. + LAST_360_DAYS (16): + The 360 days preceding the day the report is + run. + LAST_365_DAYS (17): + The 365 days preceding the day the report is + run. + LAST_3_MONTHS (18): + The entire previous 3 calendar months + preceding the calendar month the report is run. + LAST_6_MONTHS (19): + The entire previous 6 calendar months + preceding the calendar month the report is run. + LAST_12_MONTHS (20): + The entire previous 6 calendar months + preceding the calendar month the report is run. + ALL_AVAILABLE (21): + From 3 years before the report is run, to the + day before the report is run, inclusive. + PREVIOUS_PERIOD (22): + Only valid when used in the comparison_date_range field. The + complete period preceding the date period provided in + date_range. + + In the case where date_range is a FixedDateRange of N days, + this will be a period of N days where the end date is the + date preceding the start date of the date_range. + + In the case where date_range is a RelativeDateRange, this + will be a period of the same time frame preceding the + date_range. In the case where the date_range does not + capture the full period because a report is run in the + middle of that period, this will still be the full preceding + period. For example, if date_range is THIS_WEEK, but the + report is run on a Wednesday, THIS_WEEK will be Monday - + Wednesday, but PREVIOUS_PERIOD will be Monday - Sunday. 
+ SAME_PERIOD_PREVIOUS_YEAR (24): + Only valid when used in the comparison_date_range field. The + period starting 1 year prior to the date period provided in + date_range. + + In the case where date_range is a FixedDateRange, this will + be a date range starting 1 year prior to the date_range + start date and ending 1 year prior to the date_range end + date. + + In the case where date_range is a RelativeDateRange, this + will be a period of the same time frame exactly 1 year prior + to the date_range. In the case where the date_range does not + capture the full period because a report is run in the + middle of that period, this will still be the full period 1 + year prior. For example, if date range is THIS_WEEK, but the + report is run on a Wednesday, THIS_WEEK will be Monday - + Wednesday, but SAME_PERIOD_PREVIOUS_YEAR will be Monday - + Sunday. + """ + RELATIVE_DATE_RANGE_UNSPECIFIED = 0 + TODAY = 1 + YESTERDAY = 2 + THIS_WEEK = 3 + THIS_WEEK_TO_DATE = 29 + THIS_MONTH = 4 + THIS_MONTH_TO_DATE = 26 + THIS_QUARTER = 5 + THIS_QUARTER_TO_DATE = 27 + THIS_YEAR = 6 + THIS_YEAR_TO_DATE = 28 + LAST_WEEK = 7 + LAST_MONTH = 8 + LAST_QUARTER = 9 + LAST_YEAR = 10 + LAST_7_DAYS = 11 + LAST_30_DAYS = 12 + LAST_60_DAYS = 13 + LAST_90_DAYS = 14 + LAST_180_DAYS = 15 + LAST_360_DAYS = 16 + LAST_365_DAYS = 17 + LAST_3_MONTHS = 18 + LAST_6_MONTHS = 19 + LAST_12_MONTHS = 20 + ALL_AVAILABLE = 21 + PREVIOUS_PERIOD = 22 + SAME_PERIOD_PREVIOUS_YEAR = 24 + + class FixedDateRange(proto.Message): + r"""A date range between two fixed dates (inclusive of end date). + + Attributes: + start_date (google.type.date_pb2.Date): + Required. The start date of this date range. + end_date (google.type.date_pb2.Date): + Required. The end date (inclusive) of this + date range. 
+ """ + + start_date: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=1, + message=date_pb2.Date, + ) + end_date: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=2, + message=date_pb2.Date, + ) + + fixed: "Report.DateRange.FixedDateRange" = proto.Field( + proto.MESSAGE, + number=1, + oneof="date_range_type", + message="Report.DateRange.FixedDateRange", + ) + relative: "Report.DateRange.RelativeDateRange" = proto.Field( + proto.ENUM, + number=2, + oneof="date_range_type", + enum="Report.DateRange.RelativeDateRange", + ) + name: str = proto.Field( proto.STRING, number=1, ) + report_id: int = proto.Field( + proto.INT64, + number=3, + ) + visibility: Visibility = proto.Field( + proto.ENUM, + number=2, + enum=Visibility, + ) + report_definition: "ReportDefinition" = proto.Field( + proto.MESSAGE, + number=4, + message="ReportDefinition", + ) + display_name: str = proto.Field( + proto.STRING, + number=5, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + locale: str = proto.Field( + proto.STRING, + number=8, + ) + schedule_options: "ScheduleOptions" = proto.Field( + proto.MESSAGE, + number=9, + message="ScheduleOptions", + ) -class ExportSavedReportRequest(proto.Message): - r"""Request proto for the configuration of a report run. +class RunReportRequest(proto.Message): + r"""Request message for a running a report. Attributes: - report (str): - The name of a particular saved report resource. - - A report will be run based on the specification of this - saved report. It must have the format of - "networks/{network_code}/reports/{report_id}". - format_ (google.ads.admanager_v1.types.ExportSavedReportRequest.Format): - Required. The export format requested. - include_report_properties (bool): - Whether or not to include the report - properties (e.g. 
network, user, date - generated...) in the generated report. - include_ids (bool): - Whether or not to include the IDs if there - are any (e.g. advertiser ID, order ID...) - present in the report. - include_totals_row (bool): - Whether or not to include a row containing - metric totals. - file_name (str): - The file name of report download. The file extension is - determined by export_format and gzip_compressed. - - Defaults to "DFP Report" if not specified. + name (str): + Required. The report to run. Format: + ``networks/{network_code}/reports/{report_id}`` """ - class Format(proto.Enum): - r"""Supported file formats. + name: str = proto.Field( + proto.STRING, + number=1, + ) + - Values: - FORMAT_UNSPECIFIED (0): - Default value. This value is unused. - CSV_DUMP (2): - Comma separated values meant to be used by - automated machine processing. - - Unlike other formats, the output is not - localized and there is no totals row by default. - XLSX (5): - The report file is generated as an Office - Open XML spreadsheet designed for Excel 2007+. - XML (6): - The report is generated as XML. - """ - FORMAT_UNSPECIFIED = 0 - CSV_DUMP = 2 - XLSX = 5 - XML = 6 +class RunReportMetadata(proto.Message): + r"""``RunReport`` operation metadata. + Attributes: + percent_complete (int): + An estimate of how close this report is to + being completed. Will always be 100 for failed + and completed reports. + report (str): + The result's parent report. + """ + + percent_complete: int = proto.Field( + proto.INT32, + number=2, + ) report: str = proto.Field( + proto.STRING, + number=4, + ) + + +class RunReportResponse(proto.Message): + r"""Response message for a completed ``RunReport`` operation. + + Attributes: + report_result (str): + The unique name of the generated result. Use with + ``FetchReportResultRows`` to retrieve data. 
+ """ + + report_result: str = proto.Field( proto.STRING, number=1, ) - format_: Format = proto.Field( - proto.ENUM, + + +class GetReportRequest(proto.Message): + r"""Request object for ``GetReport`` method. + + Attributes: + name (str): + Required. The resource name of the report. Format: + ``networks/{network_code}/reports/{report_id}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListReportsRequest(proto.Message): + r"""Request object for ``ListReports`` method. + + Attributes: + parent (str): + Required. The parent, which owns this collection of reports. + Format: ``networks/{network_code}`` + page_size (int): + Optional. The maximum number of ``Reports`` to return. The + service may return fewer than this value. If unspecified, at + most 50 ``Reports`` will be returned. The maximum value is + 1000; values above 1000 will be coerced to 1000. + page_token (str): + Optional. A page token, received from a previous + ``ListReports`` call. Provide this to retrieve the + subsequent page. + + When paginating, all other parameters provided to + ``ListReports`` must match the call that provided the page + token. + filter (str): + Optional. Expression to filter the response. + See syntax details at + https://developers.google.com/ad-manager/api/beta/filters + order_by (str): + Optional. Expression to specify sorting + order. See syntax details at + https://developers.google.com/ad-manager/api/beta/filters#order + skip (int): + Optional. Number of individual resources to + skip while paginating. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + skip: int = proto.Field( + proto.INT32, + number=6, + ) + + +class ListReportsResponse(proto.Message): + r"""Response object for ``ListReportsResponse`` containing matching + ``Report`` objects. + + Attributes: + reports (MutableSequence[google.ads.admanager_v1.types.Report]): + The ``Report`` objects from the specified network. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + total_size (int): + Total number of ``Report`` objects. If a filter was included + in the request, this reflects the total number after the + filtering is applied. + + ``total_size`` will not be calculated in the response unless + it has been included in a response field mask. The response + field mask can be provided to the method by using the URL + parameter ``$fields`` or ``fields``, or by using the + HTTP/gRPC header ``X-Goog-FieldMask``. + + For more information, see + https://developers.google.com/ad-manager/api/beta/field-masks + """ + + @property + def raw_page(self): + return self + + reports: MutableSequence["Report"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Report", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + total_size: int = proto.Field( + proto.INT32, + number=3, + ) + + +class CreateReportRequest(proto.Message): + r"""Request object for ``CreateReport`` method. + + Attributes: + parent (str): + Required. The parent resource where this ``Report`` will be + created. Format: ``networks/{network_code}`` + report (google.ads.admanager_v1.types.Report): + Required. The ``Report`` to create. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + report: "Report" = proto.Field( + proto.MESSAGE, + number=2, + message="Report", + ) + + +class UpdateReportRequest(proto.Message): + r"""Request object for ``UpdateReport`` method. + + Attributes: + report (google.ads.admanager_v1.types.Report): + Required. The ``Report`` to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to update. + """ + + report: "Report" = proto.Field( + proto.MESSAGE, + number=1, + message="Report", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class FetchReportResultRowsRequest(proto.Message): + r"""The request message for the fetch report result rows + endpoint. + + Attributes: + name (str): + The report result being fetched. Format: + ``networks/{network_code}/reports/{report_id}/results/{report_result_id}`` + page_size (int): + Optional. The maximum number of rows to + return. The service may return fewer than this + value. If unspecified, at most 1,000 rows will + be returned. The maximum value is 10,000; values + above 10,000 will be reduced to 10,000. + page_token (str): + Optional. A page token, received from a previous + ``FetchReportResultRows`` call. Provide this to retrieve the + second and subsequent batches of rows. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class FetchReportResultRowsResponse(proto.Message): + r"""The response message for the fetch report result rows + endpoint. + + Attributes: + rows (MutableSequence[google.ads.admanager_v1.types.Report.DataTable.Row]): + Up to ``page_size`` rows of report data. + run_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which the report was scheduled to + run. 
For non-scheduled reports, this is the time + at which the report was requested to be run. + date_ranges (MutableSequence[google.ads.admanager_v1.types.Report.DateRange.FixedDateRange]): + The computed fixed date ranges this report includes. Only + returned with the first page of results (when page_token is + not included in the request). + comparison_date_ranges (MutableSequence[google.ads.admanager_v1.types.Report.DateRange.FixedDateRange]): + The computed comparison fixed date ranges this report + includes. Only returned with the first page of results (when + page_token is not included in the request). + total_row_count (int): + The total number of rows available from this report. Useful + for pagination. Only returned with the first page of results + (when page_token is not included in the request). + next_page_token (str): + A token that can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + rows: MutableSequence["Report.DataTable.Row"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Report.DataTable.Row", + ) + run_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, number=2, - enum=Format, + message=timestamp_pb2.Timestamp, ) - include_report_properties: bool = proto.Field( - proto.BOOL, + date_ranges: MutableSequence[ + "Report.DateRange.FixedDateRange" + ] = proto.RepeatedField( + proto.MESSAGE, number=3, + message="Report.DateRange.FixedDateRange", ) - include_ids: bool = proto.Field( - proto.BOOL, + comparison_date_ranges: MutableSequence[ + "Report.DateRange.FixedDateRange" + ] = proto.RepeatedField( + proto.MESSAGE, number=4, + message="Report.DateRange.FixedDateRange", ) - include_totals_row: bool = proto.Field( - proto.BOOL, + total_row_count: int = proto.Field( + proto.INT32, number=5, ) - file_name: str = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=6, ) -class 
ExportSavedReportMetadata(proto.Message): - r"""The message stored in the - google.longrunning.Operation.metadata field. Contains metadata - regarding this execution. +class ReportDefinition(proto.Message): + r"""The definition of how a report should be run. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: - result_id (int): - The result generated in this report run. + dimensions (MutableSequence[google.ads.admanager_v1.types.Report.Dimension]): + Required. The list of dimensions to report + on. If empty, the report will have no + dimensions, and any metrics will be totals. + metrics (MutableSequence[google.ads.admanager_v1.types.Report.Metric]): + Required. The list of metrics to report on. + If empty, the report will have no metrics. + filters (MutableSequence[google.ads.admanager_v1.types.Report.Filter]): + Optional. The filters for this report. + time_zone (str): + Optional. The time zone the date range is defined in for + this report. Defaults to publisher's time zone if not + specified. Time zone in IANA format. Acceptable values + depend on the report type. Publisher time zone is always + accepted. Use "America/Los_Angeles" for pacific time, or + "Etc/UTC" for UTC. + currency_code (str): + Optional. The ISO 4217 currency code for this + report. Defaults to publisher currency code if + not specified. + date_range (google.ads.admanager_v1.types.Report.DateRange): + Required. The primary date range of this + report. + comparison_date_range (google.ads.admanager_v1.types.Report.DateRange): + Optional. The comparison date range of this + report. If unspecified, the report will not have + any comparison metrics. + + This field is a member of `oneof`_ ``_comparison_date_range``. + custom_dimension_key_ids (MutableSequence[int]): + Optional. Custom Dimension keys that represent + `CUSTOM_DIMENSION_*` dimensions. The index of this repeated + field corresponds to the index on each dimension. 
For + example, custom_dimension_key_ids[0] describes + CUSTOM_DIMENSION_0_VALUE_ID and CUSTOM_DIMENSION_0_VALUE. + line_item_custom_field_ids (MutableSequence[int]): + Optional. Custom field IDs that represent + `LINE_ITEM_CUSTOM_FIELD_*` dimensions. The index of this + repeated field corresponds to the index on each dimension. + For example, line_item_custom_field_ids[0] describes + LINE_ITEM_CUSTOM_FIELD_0_OPTION_ID and + LINE_ITEM_CUSTOM_FIELD_0_VALUE. + order_custom_field_ids (MutableSequence[int]): + Optional. Custom field IDs that represent + `ORDER_CUSTOM_FIELD_*` dimensions. The index of this repeated + field corresponds to the index on each dimension. For + example, order_custom_field_ids[0] describes + ORDER_CUSTOM_FIELD_0_OPTION_ID and + ORDER_CUSTOM_FIELD_0_VALUE. + creative_custom_field_ids (MutableSequence[int]): + Optional. Custom field IDs that represent + `CREATIVE_CUSTOM_FIELD_*` dimensions. The index of this + repeated field corresponds to the index on each dimension. + For example, creative_custom_field_ids[0] describes + CREATIVE_CUSTOM_FIELD_0_OPTION_ID and + CREATIVE_CUSTOM_FIELD_0_VALUE. + report_type (google.ads.admanager_v1.types.Report.ReportType): + Required. The type of this report. + time_period_column (google.ads.admanager_v1.types.Report.TimePeriodColumn): + Optional. Include a time period column to introduce + comparison columns in the report for each generated period. + For example, set to "QUARTERS" here to have a column for + each quarter present in the primary date range. If "PREVIOUS + PERIOD" is specified in comparison_date_range, then each + quarter column will also include comparison values for its + relative previous quarter. + flags (MutableSequence[google.ads.admanager_v1.types.Report.Flag]): + Optional. List of flags for this report. Used + to flag rows in a result set based on a set of + defined filters. + sorts (MutableSequence[google.ads.admanager_v1.types.Report.Sort]): + Optional. 
Default sorts to apply to this + report. """ - result_id: int = proto.Field( + dimensions: MutableSequence["Report.Dimension"] = proto.RepeatedField( + proto.ENUM, + number=1, + enum="Report.Dimension", + ) + metrics: MutableSequence["Report.Metric"] = proto.RepeatedField( + proto.ENUM, + number=2, + enum="Report.Metric", + ) + filters: MutableSequence["Report.Filter"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="Report.Filter", + ) + time_zone: str = proto.Field( + proto.STRING, + number=4, + ) + currency_code: str = proto.Field( + proto.STRING, + number=5, + ) + date_range: "Report.DateRange" = proto.Field( + proto.MESSAGE, + number=6, + message="Report.DateRange", + ) + comparison_date_range: "Report.DateRange" = proto.Field( + proto.MESSAGE, + number=9, + optional=True, + message="Report.DateRange", + ) + custom_dimension_key_ids: MutableSequence[int] = proto.RepeatedField( + proto.INT64, + number=7, + ) + line_item_custom_field_ids: MutableSequence[int] = proto.RepeatedField( + proto.INT64, + number=11, + ) + order_custom_field_ids: MutableSequence[int] = proto.RepeatedField( proto.INT64, + number=12, + ) + creative_custom_field_ids: MutableSequence[int] = proto.RepeatedField( + proto.INT64, + number=13, + ) + report_type: "Report.ReportType" = proto.Field( + proto.ENUM, + number=8, + enum="Report.ReportType", + ) + time_period_column: "Report.TimePeriodColumn" = proto.Field( + proto.ENUM, + number=10, + enum="Report.TimePeriodColumn", + ) + flags: MutableSequence["Report.Flag"] = proto.RepeatedField( + proto.MESSAGE, + number=14, + message="Report.Flag", + ) + sorts: MutableSequence["Report.Sort"] = proto.RepeatedField( + proto.MESSAGE, + number=15, + message="Report.Sort", + ) + + +class ScheduleOptions(proto.Message): + r"""The options for a scheduled report. + + Attributes: + schedule (google.ads.admanager_v1.types.Schedule): + Information pertaining to schedule itself. 
+ delivery_condition (google.ads.admanager_v1.types.ScheduleOptions.DeliveryCondition): + Option for when to deliver the scheduled + report. + flags (MutableSequence[google.ads.admanager_v1.types.Report.Flag]): + Optional. The flags evaluated when + ReportDeliveryOption.WHEN_FLAG_PRESENT is specified. + """ + + class DeliveryCondition(proto.Enum): + r"""Condition for when to email the scheduled report. + + Values: + NEVER (0): + Never deliver report. + ALWAYS (1): + Always deliver report. + WHEN_FLAG_CONDITIONS_MET (2): + Deliver report when flag's conditions are + met. + """ + NEVER = 0 + ALWAYS = 1 + WHEN_FLAG_CONDITIONS_MET = 2 + + schedule: "Schedule" = proto.Field( + proto.MESSAGE, number=1, + message="Schedule", ) + delivery_condition: DeliveryCondition = proto.Field( + proto.ENUM, + number=2, + enum=DeliveryCondition, + ) + flags: MutableSequence["Report.Flag"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="Report.Flag", + ) + + +class Schedule(proto.Message): + r"""The schedule for the report + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. -class ExportSavedReportResponse(proto.Message): - r"""Message included in the longrunning Operation result.response - field when the report completes successfully. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: - download_url (str): - The link to the exported file. + weekly_schedule (google.ads.admanager_v1.types.Schedule.WeeklySchedule): + Days of week to schedule report run. + + This field is a member of `oneof`_ ``frequency_schedule``. + monthly_schedule (google.ads.admanager_v1.types.Schedule.MonthlySchedule): + Days of month to schedule report run. + + This field is a member of `oneof`_ ``frequency_schedule``. 
+ start_date (google.type.date_pb2.Date): + Date for the first run of the report. + end_date (google.type.date_pb2.Date): + Date for the final run of the report. + frequency (google.ads.admanager_v1.types.Schedule.Frequency): + Frequency to run report. + start_time (google.type.timeofday_pb2.TimeOfDay): + Indicates start time for schedule to run Will use the + time_zone from ``ReportDefinition``. Defaults to the + publisher's time zone if not specified. + + For HOURLY, TWO_TIMES_DAILY, THREE_TIMES_DAILY, or + FOUR_TIMES_DAILY, this will be the time of day that the + first report will run on the first day. For example, if the + start time is 2:00 PM, and the frequency is + THREE_TIMES_DAILY, the first day will have reports scheduled + at 2:00 PM, 10:00 PM. Each subsequent day will have reports + scheduled at 6:00 AM, 2:00 PM, 10:00 PM. """ - download_url: str = proto.Field( - proto.STRING, + class Frequency(proto.Enum): + r"""Frequency to run report. + + Values: + FREQUENCY_UNSPECIFIED (0): + No Frequency specified. + HOURLY (1): + Schedule report to run every hour. + TWO_TIMES_DAILY (2): + Schedule report to run twice a day (every 12 + hours). + THREE_TIMES_DAILY (3): + Schedule report to run three times a day + (every 8 hours). + FOUR_TIMES_DAILY (4): + Schedule report to run four times a day + (every 6 hours). + DAILY (5): + Schedule report to run on a daily basis. + WEEKLY (6): + Schedule report to run on a weekly basis. + MONTHLY (7): + Schedule report to run on a monthly basis. + """ + FREQUENCY_UNSPECIFIED = 0 + HOURLY = 1 + TWO_TIMES_DAILY = 2 + THREE_TIMES_DAILY = 3 + FOUR_TIMES_DAILY = 4 + DAILY = 5 + WEEKLY = 6 + MONTHLY = 7 + + class WeeklySchedule(proto.Message): + r"""Days of week to schedule report run. + + Attributes: + weekly_scheduled_days (MutableSequence[google.type.dayofweek_pb2.DayOfWeek]): + Specifies days of the week on which to run + report. 
+ """ + + weekly_scheduled_days: MutableSequence[ + dayofweek_pb2.DayOfWeek + ] = proto.RepeatedField( + proto.ENUM, + number=1, + enum=dayofweek_pb2.DayOfWeek, + ) + + class MonthlySchedule(proto.Message): + r"""Days of Month to schedule report run. + + Attributes: + monthly_scheduled_days (MutableSequence[int]): + Specifies days of the month to run report. + Range is from 1-31. Will ignore days that are + not valid for the given month. + """ + + monthly_scheduled_days: MutableSequence[int] = proto.RepeatedField( + proto.INT32, + number=1, + ) + + weekly_schedule: WeeklySchedule = proto.Field( + proto.MESSAGE, + number=6, + oneof="frequency_schedule", + message=WeeklySchedule, + ) + monthly_schedule: MonthlySchedule = proto.Field( + proto.MESSAGE, + number=7, + oneof="frequency_schedule", + message=MonthlySchedule, + ) + start_date: date_pb2.Date = proto.Field( + proto.MESSAGE, number=1, + message=date_pb2.Date, + ) + end_date: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=2, + message=date_pb2.Date, + ) + frequency: Frequency = proto.Field( + proto.ENUM, + number=3, + enum=Frequency, + ) + start_time: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=4, + message=timeofday_pb2.TimeOfDay, ) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/role_enums.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/role_enums.py new file mode 100644 index 000000000000..a872c78e51e8 --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/role_enums.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ads.admanager.v1", + manifest={ + "RoleStatusEnum", + }, +) + + +class RoleStatusEnum(proto.Message): + r"""Wrapper message for + [RoleStatus][google.ads.admanager.v1.RoleStatusEnum.RoleStatus] + + """ + + class RoleStatus(proto.Enum): + r"""The status of the role. + + Values: + ROLE_STATUS_UNSPECIFIED (0): + No value specified. + ACTIVE (1): + Role is active. + INACTIVE (2): + Role is inactive. + """ + ROLE_STATUS_UNSPECIFIED = 0 + ACTIVE = 1 + INACTIVE = 2 + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/role_messages.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/role_messages.py new file mode 100644 index 000000000000..49acac6291d4 --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/role_messages.py @@ -0,0 +1,79 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.ads.admanager_v1.types import role_enums + +__protobuf__ = proto.module( + package="google.ads.admanager.v1", + manifest={ + "Role", + }, +) + + +class Role(proto.Message): + r"""The ``Role`` resource. + + Attributes: + name (str): + Identifier. The resource name of the ``Role``. Format: + ``networks/{network_code}/roles/{role_id}`` + role_id (int): + Output only. ``Role`` ID. + display_name (str): + Required. The display name of the ``Role``. + description (str): + Optional. The description of the ``Role``. + built_in (bool): + Output only. Whether the ``Role`` is a built-in or custom + user role. + status (google.ads.admanager_v1.types.RoleStatusEnum.RoleStatus): + Output only. The status of the ``Role``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + role_id: int = proto.Field( + proto.INT64, + number=2, + ) + display_name: str = proto.Field( + proto.STRING, + number=3, + ) + description: str = proto.Field( + proto.STRING, + number=4, + ) + built_in: bool = proto.Field( + proto.BOOL, + number=5, + ) + status: role_enums.RoleStatusEnum.RoleStatus = proto.Field( + proto.ENUM, + number=6, + enum=role_enums.RoleStatusEnum.RoleStatus, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/role_service.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/role_service.py index ee18f5215bf8..8d6d40291cc2 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/role_service.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/role_service.py @@ -19,10 +19,11 @@ import proto # type: ignore +from google.ads.admanager_v1.types import role_messages + __protobuf__ = proto.module( 
package="google.ads.admanager.v1", manifest={ - "Role", "GetRoleRequest", "ListRolesRequest", "ListRolesResponse", @@ -30,23 +31,8 @@ ) -class Role(proto.Message): - r"""The Role resource. - - Attributes: - name (str): - Identifier. The resource name of the Role. Format: - ``networks/{network_code}/roles/{role_id}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - class GetRoleRequest(proto.Message): - r"""Request object for GetRole method. + r"""Request object for ``GetRole`` method. Attributes: name (str): @@ -61,18 +47,17 @@ class GetRoleRequest(proto.Message): class ListRolesRequest(proto.Message): - r"""Request object for ListRoles method. + r"""Request object for ``ListRoles`` method. Attributes: parent (str): Required. The parent, which owns this collection of Roles. Format: ``networks/{network_code}`` page_size (int): - Optional. The maximum number of Roles to - return. The service may return fewer than this - value. If unspecified, at most 50 roles will be - returned. The maximum value is 1000; values - above 1000 will be coerced to 1000. + Optional. The maximum number of ``Roles`` to return. The + service may return fewer than this value. If unspecified, at + most 50 ``Roles`` will be returned. The maximum value is + 1000; values above 1000 will be coerced to 1000. page_token (str): Optional. A page token, received from a previous ``ListRoles`` call. Provide this to retrieve the subsequent @@ -121,20 +106,20 @@ class ListRolesRequest(proto.Message): class ListRolesResponse(proto.Message): - r"""Response object for ListRolesRequest containing matching Role - resources. + r"""Response object for ``ListRolesRequest`` containing matching + ``Role`` objects. Attributes: roles (MutableSequence[google.ads.admanager_v1.types.Role]): - The Role from the specified network. + The ``Role`` objects from the specified network. next_page_token (str): A token, which can be sent as ``page_token`` to retrieve the next page. 
If this field is omitted, there are no subsequent pages. total_size (int): - Total number of Roles. If a filter was included in the - request, this reflects the total number after the filtering - is applied. + Total number of ``Role`` objects. If a filter was included + in the request, this reflects the total number after the + filtering is applied. ``total_size`` will not be calculated in the response unless it has been included in a response field mask. The response @@ -150,10 +135,10 @@ class ListRolesResponse(proto.Message): def raw_page(self): return self - roles: MutableSequence["Role"] = proto.RepeatedField( + roles: MutableSequence[role_messages.Role] = proto.RepeatedField( proto.MESSAGE, number=1, - message="Role", + message=role_messages.Role, ) next_page_token: str = proto.Field( proto.STRING, diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/size.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/size.py index 5f552ab7d794..33b7dcb841b8 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/size.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/size.py @@ -19,11 +19,12 @@ import proto # type: ignore +from google.ads.admanager_v1.types import size_type_enum + __protobuf__ = proto.module( package="google.ads.admanager.v1", manifest={ "Size", - "SizeTypeEnum", }, ) @@ -58,59 +59,11 @@ class Size(proto.Message): proto.INT32, number=2, ) - size_type: "SizeTypeEnum.SizeType" = proto.Field( + size_type: size_type_enum.SizeTypeEnum.SizeType = proto.Field( proto.ENUM, number=3, - enum="SizeTypeEnum.SizeType", + enum=size_type_enum.SizeTypeEnum.SizeType, ) -class SizeTypeEnum(proto.Message): - r"""Wrapper message for - [SizeType][google.ads.admanager.v1.SizeTypeEnum.SizeType]. - - """ - - class SizeType(proto.Enum): - r"""The different Size types for an ad. - - Values: - SIZE_TYPE_UNSPECIFIED (0): - Default value. This value is unused. 
- PIXEL (1): - Dimension based size, an actual height and - width in pixels. - ASPECT_RATIO (2): - Size is expressed as a ratio. For example, - 4:1 could be met by a 100 x 25 sized image. - INTERSTITIAL (3): - Out-of-page (Interstitial) size that is not - related to the slot it is served. This must be - used with 1x1 size. - IGNORED (4): - Size is ignored. This must be used with 1x1 - size. - NATIVE (5): - Native size, which is a function of the how - the client renders the creative. This must be - used with 1x1 size. - FLUID (6): - Fluid size. Automatically sizes the ad by - filling the width of the enclosing column and - adjusting the height as appropriate. This must - be used with 1x1 size. - AUDIO (7): - Audio size. Used with audio ads. This must be - used with 1x1 size. - """ - SIZE_TYPE_UNSPECIFIED = 0 - PIXEL = 1 - ASPECT_RATIO = 2 - INTERSTITIAL = 3 - IGNORED = 4 - NATIVE = 5 - FLUID = 6 - AUDIO = 7 - - __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/size_type_enum.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/size_type_enum.py new file mode 100644 index 000000000000..8ad905e923a9 --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/size_type_enum.py @@ -0,0 +1,78 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ads.admanager.v1", + manifest={ + "SizeTypeEnum", + }, +) + + +class SizeTypeEnum(proto.Message): + r"""Wrapper message for + [SizeType][google.ads.admanager.v1.SizeTypeEnum.SizeType]. + + """ + + class SizeType(proto.Enum): + r"""The different Size types for an ad. + + Values: + SIZE_TYPE_UNSPECIFIED (0): + Default value. This value is unused. + PIXEL (1): + Dimension based size, an actual height and + width in pixels. + ASPECT_RATIO (2): + Size is expressed as a ratio. For example, + 4:1 could be met by a 100 x 25 sized image. + INTERSTITIAL (3): + Out-of-page (Interstitial) size that is not + related to the slot it is served. This must be + used with 1x1 size. + IGNORED (4): + Size is ignored. This must be used with 1x1 + size. + NATIVE (5): + Native size, which is a function of the how + the client renders the creative. This must be + used with 1x1 size. + FLUID (6): + Fluid size. Automatically sizes the ad by + filling the width of the enclosing column and + adjusting the height as appropriate. This must + be used with 1x1 size. + AUDIO (7): + Audio size. Used with audio ads. This must be + used with 1x1 size. 
+ """ + SIZE_TYPE_UNSPECIFIED = 0 + PIXEL = 1 + ASPECT_RATIO = 2 + INTERSTITIAL = 3 + IGNORED = 4 + NATIVE = 5 + FLUID = 6 + AUDIO = 7 + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/taxonomy_category_messages.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/taxonomy_category_messages.py new file mode 100644 index 000000000000..642311d83701 --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/taxonomy_category_messages.py @@ -0,0 +1,96 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.ads.admanager_v1.types import taxonomy_type_enum + +__protobuf__ = proto.module( + package="google.ads.admanager.v1", + manifest={ + "TaxonomyCategory", + }, +) + + +class TaxonomyCategory(proto.Message): + r"""The ``TaxonomyCategory`` resource. + + Attributes: + name (str): + Identifier. The resource name of the ``TaxonomyCategory``. + Format: + ``networks/{network_code}/taxonomyCategories/{taxonomy_category_id}`` + taxonomy_category_id (int): + Output only. ``TaxonomyCategory`` ID. + display_name (str): + Output only. Display name of the ``TaxonomyCategory``. + grouping_only (bool): + Output only. Whether this ``TaxonomyCategory`` only serves + to group its children. 
+ parent_taxonomy_category_id (int): + Output only. The ID of the parent category this + ``TaxonomyCategory`` descends from. + taxonomy_type (google.ads.admanager_v1.types.TaxonomyTypeEnum.TaxonomyType): + Output only. The taxonomy that this ``TaxonomyCategory`` + belongs to. + ancestor_names (MutableSequence[str]): + Output only. The list of names of the ancestors of this + ``TaxonomyCategory``. + ancestor_taxonomy_category_ids (MutableSequence[int]): + Output only. The list of IDs of the ancestors of this + ``TaxonomyCategory``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + taxonomy_category_id: int = proto.Field( + proto.INT64, + number=2, + ) + display_name: str = proto.Field( + proto.STRING, + number=3, + ) + grouping_only: bool = proto.Field( + proto.BOOL, + number=5, + ) + parent_taxonomy_category_id: int = proto.Field( + proto.INT64, + number=6, + ) + taxonomy_type: taxonomy_type_enum.TaxonomyTypeEnum.TaxonomyType = proto.Field( + proto.ENUM, + number=9, + enum=taxonomy_type_enum.TaxonomyTypeEnum.TaxonomyType, + ) + ancestor_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=7, + ) + ancestor_taxonomy_category_ids: MutableSequence[int] = proto.RepeatedField( + proto.INT64, + number=8, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/ad_partner_service.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/taxonomy_category_service.py similarity index 63% rename from packages/google-ads-admanager/google/ads/admanager_v1/types/ad_partner_service.py rename to packages/google-ads-admanager/google/ads/admanager_v1/types/taxonomy_category_service.py index 8604504f58e5..cc3cdbc83ccb 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/ad_partner_service.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/taxonomy_category_service.py @@ -19,39 +19,25 @@ import proto # type: ignore +from 
google.ads.admanager_v1.types import taxonomy_category_messages + __protobuf__ = proto.module( package="google.ads.admanager.v1", manifest={ - "AdPartner", - "GetAdPartnerRequest", - "ListAdPartnersRequest", - "ListAdPartnersResponse", + "GetTaxonomyCategoryRequest", + "ListTaxonomyCategoriesRequest", + "ListTaxonomyCategoriesResponse", }, ) -class AdPartner(proto.Message): - r"""The AdPartner resource. - - Attributes: - name (str): - Identifier. The resource name of the AdPartner. Format: - ``networks/{network_code}/adPartners/{ad_partner_id}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class GetAdPartnerRequest(proto.Message): - r"""Request object for GetAdPartner method. +class GetTaxonomyCategoryRequest(proto.Message): + r"""Request object for ``GetTaxonomyCategory`` method. Attributes: name (str): - Required. The resource name of the AdPartner. Format: - ``networks/{network_code}/adPartners/{ad_partner_id}`` + Required. The resource name of the TaxonomyCategory. Format: + ``networks/{network_code}/taxonomyCategories/{taxonomy_category_id}`` """ name: str = proto.Field( @@ -60,27 +46,27 @@ class GetAdPartnerRequest(proto.Message): ) -class ListAdPartnersRequest(proto.Message): - r"""Request object for ListAdPartners method. +class ListTaxonomyCategoriesRequest(proto.Message): + r"""Request object for ``ListTaxonomyCategories`` method. Attributes: parent (str): Required. The parent, which owns this collection of - AdPartners. Format: ``networks/{network_code}`` + TaxonomyCategories. Format: ``networks/{network_code}`` page_size (int): - Optional. The maximum number of AdPartners to - return. The service may return fewer than this - value. If unspecified, at most 50 AdPartners - will be returned. The maximum value is 1000; - values above 1000 will be coerced to 1000. + Optional. The maximum number of ``TaxonomyCategories`` to + return. The service may return fewer than this value. 
If + unspecified, at most 50 ``TaxonomyCategories`` will be + returned. The maximum value is 1000; values above 1000 will + be coerced to 1000. page_token (str): Optional. A page token, received from a previous - ``ListAdPartners`` call. Provide this to retrieve the - subsequent page. + ``ListTaxonomyCategories`` call. Provide this to retrieve + the subsequent page. When paginating, all other parameters provided to - ``ListAdPartners`` must match the call that provided the - page token. + ``ListTaxonomyCategories`` must match the call that provided + the page token. filter (str): Optional. Expression to filter the response. See syntax details at @@ -120,21 +106,21 @@ class ListAdPartnersRequest(proto.Message): ) -class ListAdPartnersResponse(proto.Message): - r"""Response object for ListAdPartnersRequest containing matching - AdPartner resources. +class ListTaxonomyCategoriesResponse(proto.Message): + r"""Response object for ``ListTaxonomyCategoriesRequest`` containing + matching ``TaxonomyCategory`` objects. Attributes: - ad_partners (MutableSequence[google.ads.admanager_v1.types.AdPartner]): - The AdPartner from the specified network. + taxonomy_categories (MutableSequence[google.ads.admanager_v1.types.TaxonomyCategory]): + The ``TaxonomyCategory`` objects. next_page_token (str): A token, which can be sent as ``page_token`` to retrieve the next page. If this field is omitted, there are no subsequent pages. total_size (int): - Total number of AdPartners. If a filter was included in the - request, this reflects the total number after the filtering - is applied. + Total number of ``TaxonomyCategory`` objects. If a filter + was included in the request, this reflects the total number + after the filtering is applied. ``total_size`` will not be calculated in the response unless it has been included in a response field mask. 
The response @@ -150,10 +136,12 @@ class ListAdPartnersResponse(proto.Message): def raw_page(self): return self - ad_partners: MutableSequence["AdPartner"] = proto.RepeatedField( + taxonomy_categories: MutableSequence[ + taxonomy_category_messages.TaxonomyCategory + ] = proto.RepeatedField( proto.MESSAGE, number=1, - message="AdPartner", + message=taxonomy_category_messages.TaxonomyCategory, ) next_page_token: str = proto.Field( proto.STRING, diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/taxonomy_type_enum.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/taxonomy_type_enum.py new file mode 100644 index 000000000000..c564f256d0ad --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/taxonomy_type_enum.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ads.admanager.v1", + manifest={ + "TaxonomyTypeEnum", + }, +) + + +class TaxonomyTypeEnum(proto.Message): + r"""Wrapper for + [TaxonomyType][google.ads.admanager.v1.TaxonomyTypeEnum.TaxonomyType] + + """ + + class TaxonomyType(proto.Enum): + r"""The taxonomy type of the IAB defined taxonomies. + Used for Publisher provided signals. 
+ + Values: + TAXONOMY_TYPE_UNSPECIFIED (0): + Unspecified/not present + TAXONOMY_IAB_AUDIENCE_1_1 (3): + The IAB Audience Taxonomy v1.1. + TAXONOMY_IAB_CONTENT_2_1 (4): + The IAB Content Taxonomy v2.1. + TAXONOMY_IAB_CONTENT_2_2 (6): + The IAB Content Taxonomy v2.2. + TAXONOMY_IAB_CONTENT_3_0 (5): + The IAB Content Taxonomy v3.0. + TAXONOMY_GOOGLE_STRUCTURED_VIDEO_1_0 (7): + The PPS structured video signals taxonomy. + """ + TAXONOMY_TYPE_UNSPECIFIED = 0 + TAXONOMY_IAB_AUDIENCE_1_1 = 3 + TAXONOMY_IAB_CONTENT_2_1 = 4 + TAXONOMY_IAB_CONTENT_2_2 = 6 + TAXONOMY_IAB_CONTENT_3_0 = 5 + TAXONOMY_GOOGLE_STRUCTURED_VIDEO_1_0 = 7 + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/team_messages.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/team_messages.py new file mode 100644 index 000000000000..1016c5f2437d --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/team_messages.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ads.admanager.v1", + manifest={ + "Team", + }, +) + + +class Team(proto.Message): + r"""A Team defines a grouping of users and what entities they + have access to. 
+ + Attributes: + name (str): + Identifier. The resource name of the ``Team``. Format: + ``networks/{network_code}/teams/{team_id}`` + team_id (int): + Output only. The unique ID of the Team. This + value is assigned by Google. Teams that are + created by Google will have negative IDs. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + team_id: int = proto.Field( + proto.INT64, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/team_service.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/team_service.py deleted file mode 100644 index 2e2a2ed2d4ca..000000000000 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/team_service.py +++ /dev/null @@ -1,168 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -__protobuf__ = proto.module( - package="google.ads.admanager.v1", - manifest={ - "Team", - "GetTeamRequest", - "ListTeamsRequest", - "ListTeamsResponse", - }, -) - - -class Team(proto.Message): - r"""The Team resource. - - Attributes: - name (str): - Identifier. The resource name of the Team. 
Format: - ``networks/{network_code}/teams/{team_id}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class GetTeamRequest(proto.Message): - r"""Request object for GetTeam method. - - Attributes: - name (str): - Required. The resource name of the Team. Format: - ``networks/{network_code}/teams/{team_id}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListTeamsRequest(proto.Message): - r"""Request object for ListTeams method. - - Attributes: - parent (str): - Required. The parent, which owns this collection of Teams. - Format: ``networks/{network_code}`` - page_size (int): - Optional. The maximum number of Teams to - return. The service may return fewer than this - value. If unspecified, at most 50 teams will be - returned. The maximum value is 1000; values - above 1000 will be coerced to 1000. - page_token (str): - Optional. A page token, received from a previous - ``ListTeams`` call. Provide this to retrieve the subsequent - page. - - When paginating, all other parameters provided to - ``ListTeams`` must match the call that provided the page - token. - filter (str): - Optional. Expression to filter the response. - See syntax details at - https://developers.google.com/ad-manager/api/beta/filters - order_by (str): - Optional. Expression to specify sorting - order. See syntax details at - https://developers.google.com/ad-manager/api/beta/filters#order - skip (int): - Optional. Number of individual resources to - skip while paginating. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - skip: int = proto.Field( - proto.INT32, - number=6, - ) - - -class ListTeamsResponse(proto.Message): - r"""Response object for ListTeamsRequest containing matching Team - resources. - - Attributes: - teams (MutableSequence[google.ads.admanager_v1.types.Team]): - The Team from the specified network. - next_page_token (str): - A token, which can be sent as ``page_token`` to retrieve the - next page. If this field is omitted, there are no subsequent - pages. - total_size (int): - Total number of Teams. If a filter was included in the - request, this reflects the total number after the filtering - is applied. - - ``total_size`` will not be calculated in the response unless - it has been included in a response field mask. The response - field mask can be provided to the method by using the URL - parameter ``$fields`` or ``fields``, or by using the - HTTP/gRPC header ``X-Goog-FieldMask``. 
- - For more information, see - https://developers.google.com/ad-manager/api/beta/field-masks - """ - - @property - def raw_page(self): - return self - - teams: MutableSequence["Team"] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message="Team", - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - total_size: int = proto.Field( - proto.INT32, - number=3, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/time_unit_enum.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/time_unit_enum.py new file mode 100644 index 000000000000..f3a738c516c4 --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/time_unit_enum.py @@ -0,0 +1,69 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ads.admanager.v1", + manifest={ + "TimeUnitEnum", + }, +) + + +class TimeUnitEnum(proto.Message): + r"""Wrapper message for TimeUnit.""" + + class TimeUnit(proto.Enum): + r"""Unit of time for the frequency cap. + + Values: + TIME_UNIT_UNSPECIFIED (0): + Default value. This value is unused. 
+ MINUTE (1): + Minute + HOUR (2): + Hour + DAY (3): + Day + WEEK (4): + Week + MONTH (5): + Month + LIFETIME (6): + Lifetime + POD (7): + Per pod of ads in a video stream. Only valid for entities in + a VIDEO_PLAYER environment. + STREAM (8): + Per video stream. Only valid for entities in a VIDEO_PLAYER + environment. + """ + TIME_UNIT_UNSPECIFIED = 0 + MINUTE = 1 + HOUR = 2 + DAY = 3 + WEEK = 4 + MONTH = 5 + LIFETIME = 6 + POD = 7 + STREAM = 8 + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/user_messages.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/user_messages.py new file mode 100644 index 000000000000..90d2d72e5e60 --- /dev/null +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/user_messages.py @@ -0,0 +1,109 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ads.admanager.v1", + manifest={ + "User", + }, +) + + +class User(proto.Message): + r"""The User resource. + + Attributes: + name (str): + Identifier. The resource name of the User. Format: + ``networks/{network_code}/users/{user_id}`` + user_id (int): + Output only. ``User`` ID. + display_name (str): + Required. The name of the User. 
It has a + maximum length of 128 characters. + email (str): + Required. The email or login of the User. In + order to create a new user, you must already + have a Google Account. + role (str): + Required. The unique Role ID of the User. + Roles that are created by Google will have + negative IDs. + active (bool): + Output only. Specifies whether or not the + User is active. An inactive user cannot log in + to the system or perform any operations. + external_id (str): + Optional. An identifier for the User that is + meaningful to the publisher. This attribute has + a maximum length of 255 characters. + service_account (bool): + Output only. Whether the user is an OAuth2 + service account user. Service account users can + only be added through the UI. + orders_ui_local_time_zone (str): + Optional. The IANA Time Zone Database time zone, e.g. + "America/New_York", used in the orders and line items UI for + this User. If not provided, the UI then defaults to using + the Network's timezone. This setting only affects the UI for + this user and does not affect the timezone of any dates and + times returned in API responses. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + user_id: int = proto.Field( + proto.INT64, + number=10, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + email: str = proto.Field( + proto.STRING, + number=3, + ) + role: str = proto.Field( + proto.STRING, + number=4, + ) + active: bool = proto.Field( + proto.BOOL, + number=6, + ) + external_id: str = proto.Field( + proto.STRING, + number=7, + ) + service_account: bool = proto.Field( + proto.BOOL, + number=8, + ) + orders_ui_local_time_zone: str = proto.Field( + proto.STRING, + number=9, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/types/user_service.py b/packages/google-ads-admanager/google/ads/admanager_v1/types/user_service.py index 1715ffb0e7c3..931e36a32f12 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/types/user_service.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/types/user_service.py @@ -22,93 +22,11 @@ __protobuf__ = proto.module( package="google.ads.admanager.v1", manifest={ - "User", "GetUserRequest", - "ListUsersRequest", - "ListUsersResponse", }, ) -class User(proto.Message): - r"""The User resource. - - Attributes: - name (str): - Identifier. The resource name of the User. Format: - ``networks/{network_code}/users/{user_id}`` - user_id (int): - Output only. ``User`` ID. - display_name (str): - Required. The name of the User. It has a - maximum length of 128 characters. - email (str): - Required. The email or login of the User. In - order to create a new user, you must already - have a Google Account. - role (str): - Required. The unique Role ID of the User. - Roles that are created by Google will have - negative IDs. - active (bool): - Output only. Specifies whether or not the - User is active. An inactive user cannot log in - to the system or perform any operations. - external_id (str): - Optional. 
An identifier for the User that is - meaningful to the publisher. This attribute has - a maximum length of 255 characters. - service_account (bool): - Output only. Whether the user is an OAuth2 - service account user. Service account users can - only be added through the UI. - orders_ui_local_time_zone (str): - Optional. The IANA Time Zone Database time zone, e.g. - "America/New_York", used in the orders and line items UI for - this User. If not provided, the UI then defaults to using - the Network's timezone. This setting only affects the UI for - this user and does not affect the timezone of any dates and - times returned in API responses. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - user_id: int = proto.Field( - proto.INT64, - number=10, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - email: str = proto.Field( - proto.STRING, - number=3, - ) - role: str = proto.Field( - proto.STRING, - number=4, - ) - active: bool = proto.Field( - proto.BOOL, - number=6, - ) - external_id: str = proto.Field( - proto.STRING, - number=7, - ) - service_account: bool = proto.Field( - proto.BOOL, - number=8, - ) - orders_ui_local_time_zone: str = proto.Field( - proto.STRING, - number=9, - ) - - class GetUserRequest(proto.Message): r"""Request object for GetUser method. @@ -124,109 +42,4 @@ class GetUserRequest(proto.Message): ) -class ListUsersRequest(proto.Message): - r"""Request object for ListUsers method. - - Attributes: - parent (str): - Required. The parent, which owns this collection of Users. - Format: ``networks/{network_code}`` - page_size (int): - Optional. The maximum number of Users to - return. The service may return fewer than this - value. If unspecified, at most 50 users will be - returned. The maximum value is 1000; values - above 1000 will be coerced to 1000. - page_token (str): - Optional. A page token, received from a previous - ``ListUsers`` call. Provide this to retrieve the subsequent - page. 
- - When paginating, all other parameters provided to - ``ListUsers`` must match the call that provided the page - token. - filter (str): - Optional. Expression to filter the response. - See syntax details at - https://developers.google.com/ad-manager/api/beta/filters - order_by (str): - Optional. Expression to specify sorting - order. See syntax details at - https://developers.google.com/ad-manager/api/beta/filters#order - skip (int): - Optional. Number of individual resources to - skip while paginating. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - skip: int = proto.Field( - proto.INT32, - number=6, - ) - - -class ListUsersResponse(proto.Message): - r"""Response object for ListUsersRequest containing matching User - resources. - - Attributes: - users (MutableSequence[google.ads.admanager_v1.types.User]): - The User from the specified network. - next_page_token (str): - A token, which can be sent as ``page_token`` to retrieve the - next page. If this field is omitted, there are no subsequent - pages. - total_size (int): - Total number of Users. If a filter was included in the - request, this reflects the total number after the filtering - is applied. - - ``total_size`` will not be calculated in the response unless - it has been included in a response field mask. The response - field mask can be provided to the method by using the URL - parameter ``$fields`` or ``fields``, or by using the - HTTP/gRPC header ``X-Goog-FieldMask``. 
- - For more information, see - https://developers.google.com/ad-manager/api/beta/field-masks - """ - - @property - def raw_page(self): - return self - - users: MutableSequence["User"] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message="User", - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - total_size: int = proto.Field( - proto.INT32, - number=3, - ) - - __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_line_item_service_list_line_items_sync.py b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_ad_unit_service_list_ad_unit_sizes_sync.py similarity index 81% rename from packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_line_item_service_list_line_items_sync.py rename to packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_ad_unit_service_list_ad_unit_sizes_sync.py index 822761814315..eb38242abe84 100644 --- a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_line_item_service_list_line_items_sync.py +++ b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_ad_unit_service_list_ad_unit_sizes_sync.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for ListLineItems +# Snippet for ListAdUnitSizes # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-ads-admanager -# [START admanager_v1_generated_LineItemService_ListLineItems_sync] +# [START admanager_v1_generated_AdUnitService_ListAdUnitSizes_sync] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -34,20 +34,20 @@ from google.ads import admanager_v1 -def sample_list_line_items(): +def sample_list_ad_unit_sizes(): # Create a client - client = admanager_v1.LineItemServiceClient() + client = admanager_v1.AdUnitServiceClient() # Initialize request argument(s) - request = admanager_v1.ListLineItemsRequest( + request = admanager_v1.ListAdUnitSizesRequest( parent="parent_value", ) # Make the request - page_result = client.list_line_items(request=request) + page_result = client.list_ad_unit_sizes(request=request) # Handle the response for response in page_result: print(response) -# [END admanager_v1_generated_LineItemService_ListLineItems_sync] +# [END admanager_v1_generated_AdUnitService_ListAdUnitSizes_sync] diff --git a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_entity_signals_mapping_service_batch_create_entity_signals_mappings_sync.py b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_entity_signals_mapping_service_batch_create_entity_signals_mappings_sync.py new file mode 100644 index 000000000000..d74224f42761 --- /dev/null +++ b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_entity_signals_mapping_service_batch_create_entity_signals_mappings_sync.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for BatchCreateEntitySignalsMappings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-admanager + + +# [START admanager_v1_generated_EntitySignalsMappingService_BatchCreateEntitySignalsMappings_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import admanager_v1 + + +def sample_batch_create_entity_signals_mappings(): + # Create a client + client = admanager_v1.EntitySignalsMappingServiceClient() + + # Initialize request argument(s) + requests = admanager_v1.CreateEntitySignalsMappingRequest() + requests.parent = "parent_value" + requests.entity_signals_mapping.audience_segment_id = 1980 + requests.entity_signals_mapping.taxonomy_category_ids = [2268, 2269] + + request = admanager_v1.BatchCreateEntitySignalsMappingsRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + response = client.batch_create_entity_signals_mappings(request=request) + + # Handle the response + print(response) + +# [END admanager_v1_generated_EntitySignalsMappingService_BatchCreateEntitySignalsMappings_sync] diff --git a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_entity_signals_mapping_service_batch_update_entity_signals_mappings_sync.py b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_entity_signals_mapping_service_batch_update_entity_signals_mappings_sync.py new file mode 100644 index 000000000000..a1fd341565ed --- 
/dev/null +++ b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_entity_signals_mapping_service_batch_update_entity_signals_mappings_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchUpdateEntitySignalsMappings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-admanager + + +# [START admanager_v1_generated_EntitySignalsMappingService_BatchUpdateEntitySignalsMappings_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import admanager_v1 + + +def sample_batch_update_entity_signals_mappings(): + # Create a client + client = admanager_v1.EntitySignalsMappingServiceClient() + + # Initialize request argument(s) + requests = admanager_v1.UpdateEntitySignalsMappingRequest() + requests.entity_signals_mapping.audience_segment_id = 1980 + requests.entity_signals_mapping.taxonomy_category_ids = [2268, 2269] + + request = admanager_v1.BatchUpdateEntitySignalsMappingsRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + response = client.batch_update_entity_signals_mappings(request=request) + + # Handle the response + print(response) + +# [END admanager_v1_generated_EntitySignalsMappingService_BatchUpdateEntitySignalsMappings_sync] diff --git a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_entity_signals_mapping_service_create_entity_signals_mapping_sync.py b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_entity_signals_mapping_service_create_entity_signals_mapping_sync.py new file mode 100644 index 000000000000..f2802f0f727b --- /dev/null +++ b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_entity_signals_mapping_service_create_entity_signals_mapping_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEntitySignalsMapping +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-admanager + + +# [START admanager_v1_generated_EntitySignalsMappingService_CreateEntitySignalsMapping_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import admanager_v1 + + +def sample_create_entity_signals_mapping(): + # Create a client + client = admanager_v1.EntitySignalsMappingServiceClient() + + # Initialize request argument(s) + entity_signals_mapping = admanager_v1.EntitySignalsMapping() + entity_signals_mapping.audience_segment_id = 1980 + entity_signals_mapping.taxonomy_category_ids = [2268, 2269] + + request = admanager_v1.CreateEntitySignalsMappingRequest( + parent="parent_value", + entity_signals_mapping=entity_signals_mapping, + ) + + # Make the request + response = client.create_entity_signals_mapping(request=request) + + # Handle the response + print(response) + +# [END admanager_v1_generated_EntitySignalsMappingService_CreateEntitySignalsMapping_sync] diff --git a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_creative_service_get_creative_sync.py b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_entity_signals_mapping_service_get_entity_signals_mapping_sync.py similarity index 77% rename 
from packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_creative_service_get_creative_sync.py rename to packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_entity_signals_mapping_service_get_entity_signals_mapping_sync.py index 95638bb68c5e..071cc603a204 100644 --- a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_creative_service_get_creative_sync.py +++ b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_entity_signals_mapping_service_get_entity_signals_mapping_sync.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for GetCreative +# Snippet for GetEntitySignalsMapping # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-ads-admanager -# [START admanager_v1_generated_CreativeService_GetCreative_sync] +# [START admanager_v1_generated_EntitySignalsMappingService_GetEntitySignalsMapping_sync] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -34,19 +34,19 @@ from google.ads import admanager_v1 -def sample_get_creative(): +def sample_get_entity_signals_mapping(): # Create a client - client = admanager_v1.CreativeServiceClient() + client = admanager_v1.EntitySignalsMappingServiceClient() # Initialize request argument(s) - request = admanager_v1.GetCreativeRequest( + request = admanager_v1.GetEntitySignalsMappingRequest( name="name_value", ) # Make the request - response = client.get_creative(request=request) + response = client.get_entity_signals_mapping(request=request) # Handle the response print(response) -# [END admanager_v1_generated_CreativeService_GetCreative_sync] +# [END admanager_v1_generated_EntitySignalsMappingService_GetEntitySignalsMapping_sync] diff --git a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_entity_signals_mapping_service_list_entity_signals_mappings_sync.py b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_entity_signals_mapping_service_list_entity_signals_mappings_sync.py new file mode 100644 index 000000000000..530858bf5334 --- /dev/null +++ b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_entity_signals_mapping_service_list_entity_signals_mappings_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListEntitySignalsMappings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-admanager + + +# [START admanager_v1_generated_EntitySignalsMappingService_ListEntitySignalsMappings_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import admanager_v1 + + +def sample_list_entity_signals_mappings(): + # Create a client + client = admanager_v1.EntitySignalsMappingServiceClient() + + # Initialize request argument(s) + request = admanager_v1.ListEntitySignalsMappingsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_entity_signals_mappings(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END admanager_v1_generated_EntitySignalsMappingService_ListEntitySignalsMappings_sync] diff --git a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_entity_signals_mapping_service_update_entity_signals_mapping_sync.py b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_entity_signals_mapping_service_update_entity_signals_mapping_sync.py new file mode 100644 index 000000000000..4a869eafe22f --- /dev/null +++ b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_entity_signals_mapping_service_update_entity_signals_mapping_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, 
Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEntitySignalsMapping +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-admanager + + +# [START admanager_v1_generated_EntitySignalsMappingService_UpdateEntitySignalsMapping_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import admanager_v1 + + +def sample_update_entity_signals_mapping(): + # Create a client + client = admanager_v1.EntitySignalsMappingServiceClient() + + # Initialize request argument(s) + entity_signals_mapping = admanager_v1.EntitySignalsMapping() + entity_signals_mapping.audience_segment_id = 1980 + entity_signals_mapping.taxonomy_category_ids = [2268, 2269] + + request = admanager_v1.UpdateEntitySignalsMappingRequest( + entity_signals_mapping=entity_signals_mapping, + ) + + # Make the request + response = client.update_entity_signals_mapping(request=request) + + # Handle the response + print(response) + +# [END admanager_v1_generated_EntitySignalsMappingService_UpdateEntitySignalsMapping_sync] diff --git a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_label_service_get_label_sync.py b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_network_service_list_networks_sync.py similarity index 81% rename from packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_label_service_get_label_sync.py rename to packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_network_service_list_networks_sync.py index f0d7ab8c7c3c..534cec07a4ec 100644 --- a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_label_service_get_label_sync.py +++ b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_network_service_list_networks_sync.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for GetLabel +# Snippet for ListNetworks # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. 
@@ -23,7 +23,7 @@ # python3 -m pip install google-ads-admanager -# [START admanager_v1_generated_LabelService_GetLabel_sync] +# [START admanager_v1_generated_NetworkService_ListNetworks_sync] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,19 +34,18 @@ from google.ads import admanager_v1 -def sample_get_label(): +def sample_list_networks(): # Create a client - client = admanager_v1.LabelServiceClient() + client = admanager_v1.NetworkServiceClient() # Initialize request argument(s) - request = admanager_v1.GetLabelRequest( - name="name_value", + request = admanager_v1.ListNetworksRequest( ) # Make the request - response = client.get_label(request=request) + response = client.list_networks(request=request) # Handle the response print(response) -# [END admanager_v1_generated_LabelService_GetLabel_sync] +# [END admanager_v1_generated_NetworkService_ListNetworks_sync] diff --git a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_report_service_create_report_sync.py b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_report_service_create_report_sync.py new file mode 100644 index 000000000000..dd81bba668bf --- /dev/null +++ b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_report_service_create_report_sync.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateReport +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-admanager + + +# [START admanager_v1_generated_ReportService_CreateReport_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import admanager_v1 + + +def sample_create_report(): + # Create a client + client = admanager_v1.ReportServiceClient() + + # Initialize request argument(s) + report = admanager_v1.Report() + report.report_definition.dimensions = ['CUSTOM_DIMENSION_9_VALUE'] + report.report_definition.metrics = ['YIELD_GROUP_MEDIATION_THIRD_PARTY_ECPM'] + report.report_definition.report_type = "HISTORICAL" + + request = admanager_v1.CreateReportRequest( + parent="parent_value", + report=report, + ) + + # Make the request + response = client.create_report(request=request) + + # Handle the response + print(response) + +# [END admanager_v1_generated_ReportService_CreateReport_sync] diff --git a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_creative_service_list_creatives_sync.py b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_report_service_fetch_report_result_rows_sync.py similarity index 79% rename from 
packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_creative_service_list_creatives_sync.py rename to packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_report_service_fetch_report_result_rows_sync.py index 95328d799b08..7c366358878e 100644 --- a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_creative_service_list_creatives_sync.py +++ b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_report_service_fetch_report_result_rows_sync.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for ListCreatives +# Snippet for FetchReportResultRows # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-ads-admanager -# [START admanager_v1_generated_CreativeService_ListCreatives_sync] +# [START admanager_v1_generated_ReportService_FetchReportResultRows_sync] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -34,20 +34,19 @@ from google.ads import admanager_v1 -def sample_list_creatives(): +def sample_fetch_report_result_rows(): # Create a client - client = admanager_v1.CreativeServiceClient() + client = admanager_v1.ReportServiceClient() # Initialize request argument(s) - request = admanager_v1.ListCreativesRequest( - parent="parent_value", + request = admanager_v1.FetchReportResultRowsRequest( ) # Make the request - page_result = client.list_creatives(request=request) + page_result = client.fetch_report_result_rows(request=request) # Handle the response for response in page_result: print(response) -# [END admanager_v1_generated_CreativeService_ListCreatives_sync] +# [END admanager_v1_generated_ReportService_FetchReportResultRows_sync] diff --git a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_team_service_get_team_sync.py b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_report_service_get_report_sync.py similarity index 82% rename from packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_team_service_get_team_sync.py rename to packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_report_service_get_report_sync.py index 00fdea6f590b..b52631c8aa86 100644 --- a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_team_service_get_team_sync.py +++ b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_report_service_get_report_sync.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for GetTeam +# Snippet for GetReport # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. 
@@ -23,7 +23,7 @@ # python3 -m pip install google-ads-admanager -# [START admanager_v1_generated_TeamService_GetTeam_sync] +# [START admanager_v1_generated_ReportService_GetReport_sync] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,19 +34,19 @@ from google.ads import admanager_v1 -def sample_get_team(): +def sample_get_report(): # Create a client - client = admanager_v1.TeamServiceClient() + client = admanager_v1.ReportServiceClient() # Initialize request argument(s) - request = admanager_v1.GetTeamRequest( + request = admanager_v1.GetReportRequest( name="name_value", ) # Make the request - response = client.get_team(request=request) + response = client.get_report(request=request) # Handle the response print(response) -# [END admanager_v1_generated_TeamService_GetTeam_sync] +# [END admanager_v1_generated_ReportService_GetReport_sync] diff --git a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_user_service_list_users_sync.py b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_report_service_list_reports_sync.py similarity index 82% rename from packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_user_service_list_users_sync.py rename to packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_report_service_list_reports_sync.py index 81f549f0e5bb..451d5fa0a1ff 100644 --- a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_user_service_list_users_sync.py +++ b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_report_service_list_reports_sync.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for ListUsers +# Snippet for ListReports # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. 
@@ -23,7 +23,7 @@ # python3 -m pip install google-ads-admanager -# [START admanager_v1_generated_UserService_ListUsers_sync] +# [START admanager_v1_generated_ReportService_ListReports_sync] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,20 +34,20 @@ from google.ads import admanager_v1 -def sample_list_users(): +def sample_list_reports(): # Create a client - client = admanager_v1.UserServiceClient() + client = admanager_v1.ReportServiceClient() # Initialize request argument(s) - request = admanager_v1.ListUsersRequest( + request = admanager_v1.ListReportsRequest( parent="parent_value", ) # Make the request - page_result = client.list_users(request=request) + page_result = client.list_reports(request=request) # Handle the response for response in page_result: print(response) -# [END admanager_v1_generated_UserService_ListUsers_sync] +# [END admanager_v1_generated_ReportService_ListReports_sync] diff --git a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_report_service_export_saved_report_sync.py b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_report_service_run_report_sync.py similarity index 82% rename from packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_report_service_export_saved_report_sync.py rename to packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_report_service_run_report_sync.py index 745769ffb3a9..d49b2529ce79 100644 --- a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_report_service_export_saved_report_sync.py +++ b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_report_service_run_report_sync.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! 
# -# Snippet for ExportSavedReport +# Snippet for RunReport # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-ads-admanager -# [START admanager_v1_generated_ReportService_ExportSavedReport_sync] +# [START admanager_v1_generated_ReportService_RunReport_sync] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,17 +34,17 @@ from google.ads import admanager_v1 -def sample_export_saved_report(): +def sample_run_report(): # Create a client client = admanager_v1.ReportServiceClient() # Initialize request argument(s) - request = admanager_v1.ExportSavedReportRequest( - format_="XML", + request = admanager_v1.RunReportRequest( + name="name_value", ) # Make the request - operation = client.export_saved_report(request=request) + operation = client.run_report(request=request) print("Waiting for operation to complete...") @@ -53,4 +53,4 @@ def sample_export_saved_report(): # Handle the response print(response) -# [END admanager_v1_generated_ReportService_ExportSavedReport_sync] +# [END admanager_v1_generated_ReportService_RunReport_sync] diff --git a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_report_service_update_report_sync.py b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_report_service_update_report_sync.py new file mode 100644 index 000000000000..334c10534208 --- /dev/null +++ b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_report_service_update_report_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateReport +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-admanager + + +# [START admanager_v1_generated_ReportService_UpdateReport_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import admanager_v1 + + +def sample_update_report(): + # Create a client + client = admanager_v1.ReportServiceClient() + + # Initialize request argument(s) + report = admanager_v1.Report() + report.report_definition.dimensions = ['CUSTOM_DIMENSION_9_VALUE'] + report.report_definition.metrics = ['YIELD_GROUP_MEDIATION_THIRD_PARTY_ECPM'] + report.report_definition.report_type = "HISTORICAL" + + request = admanager_v1.UpdateReportRequest( + report=report, + ) + + # Make the request + response = client.update_report(request=request) + + # Handle the response + print(response) + +# [END admanager_v1_generated_ReportService_UpdateReport_sync] diff --git a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_ad_partner_service_get_ad_partner_sync.py b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_taxonomy_category_service_get_taxonomy_category_sync.py similarity index 78% rename from packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_ad_partner_service_get_ad_partner_sync.py rename to packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_taxonomy_category_service_get_taxonomy_category_sync.py index ccf7c243f194..5b3de6b5f618 100644 --- a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_ad_partner_service_get_ad_partner_sync.py +++ b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_taxonomy_category_service_get_taxonomy_category_sync.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for GetAdPartner +# Snippet for GetTaxonomyCategory # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. 
@@ -23,7 +23,7 @@ # python3 -m pip install google-ads-admanager -# [START admanager_v1_generated_AdPartnerService_GetAdPartner_sync] +# [START admanager_v1_generated_TaxonomyCategoryService_GetTaxonomyCategory_sync] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,19 +34,19 @@ from google.ads import admanager_v1 -def sample_get_ad_partner(): +def sample_get_taxonomy_category(): # Create a client - client = admanager_v1.AdPartnerServiceClient() + client = admanager_v1.TaxonomyCategoryServiceClient() # Initialize request argument(s) - request = admanager_v1.GetAdPartnerRequest( + request = admanager_v1.GetTaxonomyCategoryRequest( name="name_value", ) # Make the request - response = client.get_ad_partner(request=request) + response = client.get_taxonomy_category(request=request) # Handle the response print(response) -# [END admanager_v1_generated_AdPartnerService_GetAdPartner_sync] +# [END admanager_v1_generated_TaxonomyCategoryService_GetTaxonomyCategory_sync] diff --git a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_ad_partner_service_list_ad_partners_sync.py b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_taxonomy_category_service_list_taxonomy_categories_sync.py similarity index 78% rename from packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_ad_partner_service_list_ad_partners_sync.py rename to packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_taxonomy_category_service_list_taxonomy_categories_sync.py index 41b026a5771f..bde804ae22a1 100644 --- a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_ad_partner_service_list_ad_partners_sync.py +++ b/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_taxonomy_category_service_list_taxonomy_categories_sync.py @@ -15,7 +15,7 @@ # # Generated 
code. DO NOT EDIT! # -# Snippet for ListAdPartners +# Snippet for ListTaxonomyCategories # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-ads-admanager -# [START admanager_v1_generated_AdPartnerService_ListAdPartners_sync] +# [START admanager_v1_generated_TaxonomyCategoryService_ListTaxonomyCategories_sync] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,20 +34,20 @@ from google.ads import admanager_v1 -def sample_list_ad_partners(): +def sample_list_taxonomy_categories(): # Create a client - client = admanager_v1.AdPartnerServiceClient() + client = admanager_v1.TaxonomyCategoryServiceClient() # Initialize request argument(s) - request = admanager_v1.ListAdPartnersRequest( + request = admanager_v1.ListTaxonomyCategoriesRequest( parent="parent_value", ) # Make the request - page_result = client.list_ad_partners(request=request) + page_result = client.list_taxonomy_categories(request=request) # Handle the response for response in page_result: print(response) -# [END admanager_v1_generated_AdPartnerService_ListAdPartners_sync] +# [END admanager_v1_generated_TaxonomyCategoryService_ListTaxonomyCategories_sync] diff --git a/packages/google-ads-admanager/samples/generated_samples/snippet_metadata_google.ads.admanager.v1.json b/packages/google-ads-admanager/samples/generated_samples/snippet_metadata_google.ads.admanager.v1.json index 5910a1b6bf99..d59199618c62 100644 --- a/packages/google-ads-admanager/samples/generated_samples/snippet_metadata_google.ads.admanager.v1.json +++ b/packages/google-ads-admanager/samples/generated_samples/snippet_metadata_google.ads.admanager.v1.json @@ -8,29 +8,29 @@ ], "language": "PYTHON", "name": "google-ads-admanager", - "version": "0.1.0" + "version": "0.2.0" }, "snippets": [ { "canonical": true, 
"clientMethod": { "client": { - "fullName": "google.ads.admanager_v1.AdPartnerServiceClient", - "shortName": "AdPartnerServiceClient" + "fullName": "google.ads.admanager_v1.AdUnitServiceClient", + "shortName": "AdUnitServiceClient" }, - "fullName": "google.ads.admanager_v1.AdPartnerServiceClient.get_ad_partner", + "fullName": "google.ads.admanager_v1.AdUnitServiceClient.get_ad_unit", "method": { - "fullName": "google.ads.admanager.v1.AdPartnerService.GetAdPartner", + "fullName": "google.ads.admanager.v1.AdUnitService.GetAdUnit", "service": { - "fullName": "google.ads.admanager.v1.AdPartnerService", - "shortName": "AdPartnerService" + "fullName": "google.ads.admanager.v1.AdUnitService", + "shortName": "AdUnitService" }, - "shortName": "GetAdPartner" + "shortName": "GetAdUnit" }, "parameters": [ { "name": "request", - "type": "google.ads.admanager_v1.types.GetAdPartnerRequest" + "type": "google.ads.admanager_v1.types.GetAdUnitRequest" }, { "name": "name", @@ -49,14 +49,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.ads.admanager_v1.types.AdPartner", - "shortName": "get_ad_partner" + "resultType": "google.ads.admanager_v1.types.AdUnit", + "shortName": "get_ad_unit" }, - "description": "Sample for GetAdPartner", - "file": "admanager_v1_generated_ad_partner_service_get_ad_partner_sync.py", + "description": "Sample for GetAdUnit", + "file": "admanager_v1_generated_ad_unit_service_get_ad_unit_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_AdPartnerService_GetAdPartner_sync", + "regionTag": "admanager_v1_generated_AdUnitService_GetAdUnit_sync", "segments": [ { "end": 51, @@ -89,87 +89,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_ad_partner_service_get_ad_partner_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.ads.admanager_v1.AdPartnerServiceClient", - "shortName": "AdPartnerServiceClient" - }, - "fullName": 
"google.ads.admanager_v1.AdPartnerServiceClient.list_ad_partners", - "method": { - "fullName": "google.ads.admanager.v1.AdPartnerService.ListAdPartners", - "service": { - "fullName": "google.ads.admanager.v1.AdPartnerService", - "shortName": "AdPartnerService" - }, - "shortName": "ListAdPartners" - }, - "parameters": [ - { - "name": "request", - "type": "google.ads.admanager_v1.types.ListAdPartnersRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.ads.admanager_v1.services.ad_partner_service.pagers.ListAdPartnersPager", - "shortName": "list_ad_partners" - }, - "description": "Sample for ListAdPartners", - "file": "admanager_v1_generated_ad_partner_service_list_ad_partners_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_AdPartnerService_ListAdPartners_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "admanager_v1_generated_ad_partner_service_list_ad_partners_sync.py" + "title": "admanager_v1_generated_ad_unit_service_get_ad_unit_sync.py" }, { "canonical": true, @@ -178,22 +98,22 @@ "fullName": "google.ads.admanager_v1.AdUnitServiceClient", "shortName": "AdUnitServiceClient" }, - "fullName": "google.ads.admanager_v1.AdUnitServiceClient.get_ad_unit", + "fullName": "google.ads.admanager_v1.AdUnitServiceClient.list_ad_unit_sizes", "method": { - "fullName": "google.ads.admanager.v1.AdUnitService.GetAdUnit", + "fullName": 
"google.ads.admanager.v1.AdUnitService.ListAdUnitSizes", "service": { "fullName": "google.ads.admanager.v1.AdUnitService", "shortName": "AdUnitService" }, - "shortName": "GetAdUnit" + "shortName": "ListAdUnitSizes" }, "parameters": [ { "name": "request", - "type": "google.ads.admanager_v1.types.GetAdUnitRequest" + "type": "google.ads.admanager_v1.types.ListAdUnitSizesRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -209,22 +129,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.ads.admanager_v1.types.AdUnit", - "shortName": "get_ad_unit" + "resultType": "google.ads.admanager_v1.services.ad_unit_service.pagers.ListAdUnitSizesPager", + "shortName": "list_ad_unit_sizes" }, - "description": "Sample for GetAdUnit", - "file": "admanager_v1_generated_ad_unit_service_get_ad_unit_sync.py", + "description": "Sample for ListAdUnitSizes", + "file": "admanager_v1_generated_ad_unit_service_list_ad_unit_sizes_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_AdUnitService_GetAdUnit_sync", + "regionTag": "admanager_v1_generated_AdUnitService_ListAdUnitSizes_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -244,12 +164,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_ad_unit_service_get_ad_unit_sync.py" + "title": "admanager_v1_generated_ad_unit_service_list_ad_unit_sizes_sync.py" }, { "canonical": true, @@ -495,22 +415,22 @@ "canonical": true, "clientMethod": { "client": { - "fullName": "google.ads.admanager_v1.ContactServiceClient", - "shortName": "ContactServiceClient" + "fullName": "google.ads.admanager_v1.CustomFieldServiceClient", + "shortName": "CustomFieldServiceClient" }, - "fullName": "google.ads.admanager_v1.ContactServiceClient.get_contact", + "fullName": 
"google.ads.admanager_v1.CustomFieldServiceClient.get_custom_field", "method": { - "fullName": "google.ads.admanager.v1.ContactService.GetContact", + "fullName": "google.ads.admanager.v1.CustomFieldService.GetCustomField", "service": { - "fullName": "google.ads.admanager.v1.ContactService", - "shortName": "ContactService" + "fullName": "google.ads.admanager.v1.CustomFieldService", + "shortName": "CustomFieldService" }, - "shortName": "GetContact" + "shortName": "GetCustomField" }, "parameters": [ { "name": "request", - "type": "google.ads.admanager_v1.types.GetContactRequest" + "type": "google.ads.admanager_v1.types.GetCustomFieldRequest" }, { "name": "name", @@ -529,14 +449,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.ads.admanager_v1.types.Contact", - "shortName": "get_contact" + "resultType": "google.ads.admanager_v1.types.CustomField", + "shortName": "get_custom_field" }, - "description": "Sample for GetContact", - "file": "admanager_v1_generated_contact_service_get_contact_sync.py", + "description": "Sample for GetCustomField", + "file": "admanager_v1_generated_custom_field_service_get_custom_field_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_ContactService_GetContact_sync", + "regionTag": "admanager_v1_generated_CustomFieldService_GetCustomField_sync", "segments": [ { "end": 51, @@ -569,28 +489,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_contact_service_get_contact_sync.py" + "title": "admanager_v1_generated_custom_field_service_get_custom_field_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.ads.admanager_v1.ContactServiceClient", - "shortName": "ContactServiceClient" + "fullName": "google.ads.admanager_v1.CustomFieldServiceClient", + "shortName": "CustomFieldServiceClient" }, - "fullName": "google.ads.admanager_v1.ContactServiceClient.list_contacts", + "fullName": 
"google.ads.admanager_v1.CustomFieldServiceClient.list_custom_fields", "method": { - "fullName": "google.ads.admanager.v1.ContactService.ListContacts", + "fullName": "google.ads.admanager.v1.CustomFieldService.ListCustomFields", "service": { - "fullName": "google.ads.admanager.v1.ContactService", - "shortName": "ContactService" + "fullName": "google.ads.admanager.v1.CustomFieldService", + "shortName": "CustomFieldService" }, - "shortName": "ListContacts" + "shortName": "ListCustomFields" }, "parameters": [ { "name": "request", - "type": "google.ads.admanager_v1.types.ListContactsRequest" + "type": "google.ads.admanager_v1.types.ListCustomFieldsRequest" }, { "name": "parent", @@ -609,14 +529,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.ads.admanager_v1.services.contact_service.pagers.ListContactsPager", - "shortName": "list_contacts" + "resultType": "google.ads.admanager_v1.services.custom_field_service.pagers.ListCustomFieldsPager", + "shortName": "list_custom_fields" }, - "description": "Sample for ListContacts", - "file": "admanager_v1_generated_contact_service_list_contacts_sync.py", + "description": "Sample for ListCustomFields", + "file": "admanager_v1_generated_custom_field_service_list_custom_fields_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_ContactService_ListContacts_sync", + "regionTag": "admanager_v1_generated_CustomFieldService_ListCustomFields_sync", "segments": [ { "end": 52, @@ -649,28 +569,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_contact_service_list_contacts_sync.py" + "title": "admanager_v1_generated_custom_field_service_list_custom_fields_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.ads.admanager_v1.CreativeServiceClient", - "shortName": "CreativeServiceClient" + "fullName": "google.ads.admanager_v1.CustomTargetingKeyServiceClient", + "shortName": "CustomTargetingKeyServiceClient" }, - 
"fullName": "google.ads.admanager_v1.CreativeServiceClient.get_creative", + "fullName": "google.ads.admanager_v1.CustomTargetingKeyServiceClient.get_custom_targeting_key", "method": { - "fullName": "google.ads.admanager.v1.CreativeService.GetCreative", + "fullName": "google.ads.admanager.v1.CustomTargetingKeyService.GetCustomTargetingKey", "service": { - "fullName": "google.ads.admanager.v1.CreativeService", - "shortName": "CreativeService" + "fullName": "google.ads.admanager.v1.CustomTargetingKeyService", + "shortName": "CustomTargetingKeyService" }, - "shortName": "GetCreative" + "shortName": "GetCustomTargetingKey" }, "parameters": [ { "name": "request", - "type": "google.ads.admanager_v1.types.GetCreativeRequest" + "type": "google.ads.admanager_v1.types.GetCustomTargetingKeyRequest" }, { "name": "name", @@ -689,14 +609,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.ads.admanager_v1.types.Creative", - "shortName": "get_creative" + "resultType": "google.ads.admanager_v1.types.CustomTargetingKey", + "shortName": "get_custom_targeting_key" }, - "description": "Sample for GetCreative", - "file": "admanager_v1_generated_creative_service_get_creative_sync.py", + "description": "Sample for GetCustomTargetingKey", + "file": "admanager_v1_generated_custom_targeting_key_service_get_custom_targeting_key_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_CreativeService_GetCreative_sync", + "regionTag": "admanager_v1_generated_CustomTargetingKeyService_GetCustomTargetingKey_sync", "segments": [ { "end": 51, @@ -729,28 +649,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_creative_service_get_creative_sync.py" + "title": "admanager_v1_generated_custom_targeting_key_service_get_custom_targeting_key_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.ads.admanager_v1.CreativeServiceClient", - "shortName": "CreativeServiceClient" + "fullName": 
"google.ads.admanager_v1.CustomTargetingKeyServiceClient", + "shortName": "CustomTargetingKeyServiceClient" }, - "fullName": "google.ads.admanager_v1.CreativeServiceClient.list_creatives", + "fullName": "google.ads.admanager_v1.CustomTargetingKeyServiceClient.list_custom_targeting_keys", "method": { - "fullName": "google.ads.admanager.v1.CreativeService.ListCreatives", + "fullName": "google.ads.admanager.v1.CustomTargetingKeyService.ListCustomTargetingKeys", "service": { - "fullName": "google.ads.admanager.v1.CreativeService", - "shortName": "CreativeService" + "fullName": "google.ads.admanager.v1.CustomTargetingKeyService", + "shortName": "CustomTargetingKeyService" }, - "shortName": "ListCreatives" + "shortName": "ListCustomTargetingKeys" }, "parameters": [ { "name": "request", - "type": "google.ads.admanager_v1.types.ListCreativesRequest" + "type": "google.ads.admanager_v1.types.ListCustomTargetingKeysRequest" }, { "name": "parent", @@ -769,14 +689,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.ads.admanager_v1.services.creative_service.pagers.ListCreativesPager", - "shortName": "list_creatives" + "resultType": "google.ads.admanager_v1.services.custom_targeting_key_service.pagers.ListCustomTargetingKeysPager", + "shortName": "list_custom_targeting_keys" }, - "description": "Sample for ListCreatives", - "file": "admanager_v1_generated_creative_service_list_creatives_sync.py", + "description": "Sample for ListCustomTargetingKeys", + "file": "admanager_v1_generated_custom_targeting_key_service_list_custom_targeting_keys_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_CreativeService_ListCreatives_sync", + "regionTag": "admanager_v1_generated_CustomTargetingKeyService_ListCustomTargetingKeys_sync", "segments": [ { "end": 52, @@ -809,28 +729,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_creative_service_list_creatives_sync.py" + "title": 
"admanager_v1_generated_custom_targeting_key_service_list_custom_targeting_keys_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.ads.admanager_v1.CustomFieldServiceClient", - "shortName": "CustomFieldServiceClient" + "fullName": "google.ads.admanager_v1.CustomTargetingValueServiceClient", + "shortName": "CustomTargetingValueServiceClient" }, - "fullName": "google.ads.admanager_v1.CustomFieldServiceClient.get_custom_field", + "fullName": "google.ads.admanager_v1.CustomTargetingValueServiceClient.get_custom_targeting_value", "method": { - "fullName": "google.ads.admanager.v1.CustomFieldService.GetCustomField", + "fullName": "google.ads.admanager.v1.CustomTargetingValueService.GetCustomTargetingValue", "service": { - "fullName": "google.ads.admanager.v1.CustomFieldService", - "shortName": "CustomFieldService" + "fullName": "google.ads.admanager.v1.CustomTargetingValueService", + "shortName": "CustomTargetingValueService" }, - "shortName": "GetCustomField" + "shortName": "GetCustomTargetingValue" }, "parameters": [ { "name": "request", - "type": "google.ads.admanager_v1.types.GetCustomFieldRequest" + "type": "google.ads.admanager_v1.types.GetCustomTargetingValueRequest" }, { "name": "name", @@ -849,14 +769,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.ads.admanager_v1.types.CustomField", - "shortName": "get_custom_field" + "resultType": "google.ads.admanager_v1.types.CustomTargetingValue", + "shortName": "get_custom_targeting_value" }, - "description": "Sample for GetCustomField", - "file": "admanager_v1_generated_custom_field_service_get_custom_field_sync.py", + "description": "Sample for GetCustomTargetingValue", + "file": "admanager_v1_generated_custom_targeting_value_service_get_custom_targeting_value_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_CustomFieldService_GetCustomField_sync", + "regionTag": 
"admanager_v1_generated_CustomTargetingValueService_GetCustomTargetingValue_sync", "segments": [ { "end": 51, @@ -889,28 +809,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_custom_field_service_get_custom_field_sync.py" + "title": "admanager_v1_generated_custom_targeting_value_service_get_custom_targeting_value_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.ads.admanager_v1.CustomFieldServiceClient", - "shortName": "CustomFieldServiceClient" + "fullName": "google.ads.admanager_v1.CustomTargetingValueServiceClient", + "shortName": "CustomTargetingValueServiceClient" }, - "fullName": "google.ads.admanager_v1.CustomFieldServiceClient.list_custom_fields", + "fullName": "google.ads.admanager_v1.CustomTargetingValueServiceClient.list_custom_targeting_values", "method": { - "fullName": "google.ads.admanager.v1.CustomFieldService.ListCustomFields", + "fullName": "google.ads.admanager.v1.CustomTargetingValueService.ListCustomTargetingValues", "service": { - "fullName": "google.ads.admanager.v1.CustomFieldService", - "shortName": "CustomFieldService" + "fullName": "google.ads.admanager.v1.CustomTargetingValueService", + "shortName": "CustomTargetingValueService" }, - "shortName": "ListCustomFields" + "shortName": "ListCustomTargetingValues" }, "parameters": [ { "name": "request", - "type": "google.ads.admanager_v1.types.ListCustomFieldsRequest" + "type": "google.ads.admanager_v1.types.ListCustomTargetingValuesRequest" }, { "name": "parent", @@ -929,14 +849,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.ads.admanager_v1.services.custom_field_service.pagers.ListCustomFieldsPager", - "shortName": "list_custom_fields" + "resultType": "google.ads.admanager_v1.services.custom_targeting_value_service.pagers.ListCustomTargetingValuesPager", + "shortName": "list_custom_targeting_values" }, - "description": "Sample for ListCustomFields", - "file": 
"admanager_v1_generated_custom_field_service_list_custom_fields_sync.py", + "description": "Sample for ListCustomTargetingValues", + "file": "admanager_v1_generated_custom_targeting_value_service_list_custom_targeting_values_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_CustomFieldService_ListCustomFields_sync", + "regionTag": "admanager_v1_generated_CustomTargetingValueService_ListCustomTargetingValues_sync", "segments": [ { "end": 52, @@ -969,33 +889,37 @@ "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_custom_field_service_list_custom_fields_sync.py" + "title": "admanager_v1_generated_custom_targeting_value_service_list_custom_targeting_values_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.ads.admanager_v1.CustomTargetingKeyServiceClient", - "shortName": "CustomTargetingKeyServiceClient" + "fullName": "google.ads.admanager_v1.EntitySignalsMappingServiceClient", + "shortName": "EntitySignalsMappingServiceClient" }, - "fullName": "google.ads.admanager_v1.CustomTargetingKeyServiceClient.get_custom_targeting_key", + "fullName": "google.ads.admanager_v1.EntitySignalsMappingServiceClient.batch_create_entity_signals_mappings", "method": { - "fullName": "google.ads.admanager.v1.CustomTargetingKeyService.GetCustomTargetingKey", + "fullName": "google.ads.admanager.v1.EntitySignalsMappingService.BatchCreateEntitySignalsMappings", "service": { - "fullName": "google.ads.admanager.v1.CustomTargetingKeyService", - "shortName": "CustomTargetingKeyService" + "fullName": "google.ads.admanager.v1.EntitySignalsMappingService", + "shortName": "EntitySignalsMappingService" }, - "shortName": "GetCustomTargetingKey" + "shortName": "BatchCreateEntitySignalsMappings" }, "parameters": [ { "name": "request", - "type": "google.ads.admanager_v1.types.GetCustomTargetingKeyRequest" + "type": "google.ads.admanager_v1.types.BatchCreateEntitySignalsMappingsRequest" }, { - "name": "name", 
+ "name": "parent", "type": "str" }, + { + "name": "requests", + "type": "MutableSequence[google.ads.admanager_v1.types.CreateEntitySignalsMappingRequest]" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -1009,22 +933,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.ads.admanager_v1.types.CustomTargetingKey", - "shortName": "get_custom_targeting_key" + "resultType": "google.ads.admanager_v1.types.BatchCreateEntitySignalsMappingsResponse", + "shortName": "batch_create_entity_signals_mappings" }, - "description": "Sample for GetCustomTargetingKey", - "file": "admanager_v1_generated_custom_targeting_key_service_get_custom_targeting_key_sync.py", + "description": "Sample for BatchCreateEntitySignalsMappings", + "file": "admanager_v1_generated_entity_signals_mapping_service_batch_create_entity_signals_mappings_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_CustomTargetingKeyService_GetCustomTargetingKey_sync", + "regionTag": "admanager_v1_generated_EntitySignalsMappingService_BatchCreateEntitySignalsMappings_sync", "segments": [ { - "end": 51, + "end": 57, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 57, "start": 27, "type": "SHORT" }, @@ -1034,48 +958,52 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 51, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 54, + "start": 52, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 58, + "start": 55, "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_custom_targeting_key_service_get_custom_targeting_key_sync.py" + "title": "admanager_v1_generated_entity_signals_mapping_service_batch_create_entity_signals_mappings_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.ads.admanager_v1.CustomTargetingKeyServiceClient", - "shortName": "CustomTargetingKeyServiceClient" + "fullName": 
"google.ads.admanager_v1.EntitySignalsMappingServiceClient", + "shortName": "EntitySignalsMappingServiceClient" }, - "fullName": "google.ads.admanager_v1.CustomTargetingKeyServiceClient.list_custom_targeting_keys", + "fullName": "google.ads.admanager_v1.EntitySignalsMappingServiceClient.batch_update_entity_signals_mappings", "method": { - "fullName": "google.ads.admanager.v1.CustomTargetingKeyService.ListCustomTargetingKeys", + "fullName": "google.ads.admanager.v1.EntitySignalsMappingService.BatchUpdateEntitySignalsMappings", "service": { - "fullName": "google.ads.admanager.v1.CustomTargetingKeyService", - "shortName": "CustomTargetingKeyService" + "fullName": "google.ads.admanager.v1.EntitySignalsMappingService", + "shortName": "EntitySignalsMappingService" }, - "shortName": "ListCustomTargetingKeys" + "shortName": "BatchUpdateEntitySignalsMappings" }, "parameters": [ { "name": "request", - "type": "google.ads.admanager_v1.types.ListCustomTargetingKeysRequest" + "type": "google.ads.admanager_v1.types.BatchUpdateEntitySignalsMappingsRequest" }, { "name": "parent", "type": "str" }, + { + "name": "requests", + "type": "MutableSequence[google.ads.admanager_v1.types.UpdateEntitySignalsMappingRequest]" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -1089,22 +1017,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.ads.admanager_v1.services.custom_targeting_key_service.pagers.ListCustomTargetingKeysPager", - "shortName": "list_custom_targeting_keys" + "resultType": "google.ads.admanager_v1.types.BatchUpdateEntitySignalsMappingsResponse", + "shortName": "batch_update_entity_signals_mappings" }, - "description": "Sample for ListCustomTargetingKeys", - "file": "admanager_v1_generated_custom_targeting_key_service_list_custom_targeting_keys_sync.py", + "description": "Sample for BatchUpdateEntitySignalsMappings", + "file": "admanager_v1_generated_entity_signals_mapping_service_batch_update_entity_signals_mappings_sync.py", "language": 
"PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_CustomTargetingKeyService_ListCustomTargetingKeys_sync", + "regionTag": "admanager_v1_generated_EntitySignalsMappingService_BatchUpdateEntitySignalsMappings_sync", "segments": [ { - "end": 52, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 56, "start": 27, "type": "SHORT" }, @@ -1114,43 +1042,127 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "admanager_v1_generated_entity_signals_mapping_service_batch_update_entity_signals_mappings_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ads.admanager_v1.EntitySignalsMappingServiceClient", + "shortName": "EntitySignalsMappingServiceClient" + }, + "fullName": "google.ads.admanager_v1.EntitySignalsMappingServiceClient.create_entity_signals_mapping", + "method": { + "fullName": "google.ads.admanager.v1.EntitySignalsMappingService.CreateEntitySignalsMapping", + "service": { + "fullName": "google.ads.admanager.v1.EntitySignalsMappingService", + "shortName": "EntitySignalsMappingService" + }, + "shortName": "CreateEntitySignalsMapping" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.admanager_v1.types.CreateEntitySignalsMappingRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entity_signals_mapping", + "type": "google.ads.admanager_v1.types.EntitySignalsMapping" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ads.admanager_v1.types.EntitySignalsMapping", + "shortName": "create_entity_signals_mapping" + }, + "description": "Sample for 
CreateEntitySignalsMapping", + "file": "admanager_v1_generated_entity_signals_mapping_service_create_entity_signals_mapping_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "admanager_v1_generated_EntitySignalsMappingService_CreateEntitySignalsMapping_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, { "end": 53, - "start": 49, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_custom_targeting_key_service_list_custom_targeting_keys_sync.py" + "title": "admanager_v1_generated_entity_signals_mapping_service_create_entity_signals_mapping_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.ads.admanager_v1.CustomTargetingValueServiceClient", - "shortName": "CustomTargetingValueServiceClient" + "fullName": "google.ads.admanager_v1.EntitySignalsMappingServiceClient", + "shortName": "EntitySignalsMappingServiceClient" }, - "fullName": "google.ads.admanager_v1.CustomTargetingValueServiceClient.get_custom_targeting_value", + "fullName": "google.ads.admanager_v1.EntitySignalsMappingServiceClient.get_entity_signals_mapping", "method": { - "fullName": "google.ads.admanager.v1.CustomTargetingValueService.GetCustomTargetingValue", + "fullName": "google.ads.admanager.v1.EntitySignalsMappingService.GetEntitySignalsMapping", "service": { - "fullName": "google.ads.admanager.v1.CustomTargetingValueService", - "shortName": "CustomTargetingValueService" + "fullName": "google.ads.admanager.v1.EntitySignalsMappingService", + "shortName": "EntitySignalsMappingService" }, - "shortName": "GetCustomTargetingValue" + "shortName": "GetEntitySignalsMapping" }, "parameters": [ { "name": "request", - "type": 
"google.ads.admanager_v1.types.GetCustomTargetingValueRequest" + "type": "google.ads.admanager_v1.types.GetEntitySignalsMappingRequest" }, { "name": "name", @@ -1169,14 +1181,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.ads.admanager_v1.types.CustomTargetingValue", - "shortName": "get_custom_targeting_value" + "resultType": "google.ads.admanager_v1.types.EntitySignalsMapping", + "shortName": "get_entity_signals_mapping" }, - "description": "Sample for GetCustomTargetingValue", - "file": "admanager_v1_generated_custom_targeting_value_service_get_custom_targeting_value_sync.py", + "description": "Sample for GetEntitySignalsMapping", + "file": "admanager_v1_generated_entity_signals_mapping_service_get_entity_signals_mapping_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_CustomTargetingValueService_GetCustomTargetingValue_sync", + "regionTag": "admanager_v1_generated_EntitySignalsMappingService_GetEntitySignalsMapping_sync", "segments": [ { "end": 51, @@ -1209,28 +1221,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_custom_targeting_value_service_get_custom_targeting_value_sync.py" + "title": "admanager_v1_generated_entity_signals_mapping_service_get_entity_signals_mapping_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.ads.admanager_v1.CustomTargetingValueServiceClient", - "shortName": "CustomTargetingValueServiceClient" + "fullName": "google.ads.admanager_v1.EntitySignalsMappingServiceClient", + "shortName": "EntitySignalsMappingServiceClient" }, - "fullName": "google.ads.admanager_v1.CustomTargetingValueServiceClient.list_custom_targeting_values", + "fullName": "google.ads.admanager_v1.EntitySignalsMappingServiceClient.list_entity_signals_mappings", "method": { - "fullName": "google.ads.admanager.v1.CustomTargetingValueService.ListCustomTargetingValues", + "fullName": 
"google.ads.admanager.v1.EntitySignalsMappingService.ListEntitySignalsMappings", "service": { - "fullName": "google.ads.admanager.v1.CustomTargetingValueService", - "shortName": "CustomTargetingValueService" + "fullName": "google.ads.admanager.v1.EntitySignalsMappingService", + "shortName": "EntitySignalsMappingService" }, - "shortName": "ListCustomTargetingValues" + "shortName": "ListEntitySignalsMappings" }, "parameters": [ { "name": "request", - "type": "google.ads.admanager_v1.types.ListCustomTargetingValuesRequest" + "type": "google.ads.admanager_v1.types.ListEntitySignalsMappingsRequest" }, { "name": "parent", @@ -1249,14 +1261,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.ads.admanager_v1.services.custom_targeting_value_service.pagers.ListCustomTargetingValuesPager", - "shortName": "list_custom_targeting_values" + "resultType": "google.ads.admanager_v1.services.entity_signals_mapping_service.pagers.ListEntitySignalsMappingsPager", + "shortName": "list_entity_signals_mappings" }, - "description": "Sample for ListCustomTargetingValues", - "file": "admanager_v1_generated_custom_targeting_value_service_list_custom_targeting_values_sync.py", + "description": "Sample for ListEntitySignalsMappings", + "file": "admanager_v1_generated_entity_signals_mapping_service_list_entity_signals_mappings_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_CustomTargetingValueService_ListCustomTargetingValues_sync", + "regionTag": "admanager_v1_generated_EntitySignalsMappingService_ListEntitySignalsMappings_sync", "segments": [ { "end": 52, @@ -1289,28 +1301,268 @@ "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_custom_targeting_value_service_list_custom_targeting_values_sync.py" + "title": "admanager_v1_generated_entity_signals_mapping_service_list_entity_signals_mappings_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.ads.admanager_v1.EntitySignalsMappingServiceClient", + "shortName": "EntitySignalsMappingServiceClient" + }, + "fullName": "google.ads.admanager_v1.EntitySignalsMappingServiceClient.update_entity_signals_mapping", + "method": { + "fullName": "google.ads.admanager.v1.EntitySignalsMappingService.UpdateEntitySignalsMapping", + "service": { + "fullName": "google.ads.admanager.v1.EntitySignalsMappingService", + "shortName": "EntitySignalsMappingService" + }, + "shortName": "UpdateEntitySignalsMapping" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.admanager_v1.types.UpdateEntitySignalsMappingRequest" + }, + { + "name": "entity_signals_mapping", + "type": "google.ads.admanager_v1.types.EntitySignalsMapping" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ads.admanager_v1.types.EntitySignalsMapping", + "shortName": "update_entity_signals_mapping" + }, + "description": "Sample for UpdateEntitySignalsMapping", + "file": "admanager_v1_generated_entity_signals_mapping_service_update_entity_signals_mapping_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "admanager_v1_generated_EntitySignalsMappingService_UpdateEntitySignalsMapping_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "admanager_v1_generated_entity_signals_mapping_service_update_entity_signals_mapping_sync.py" }, { "canonical": true, 
"clientMethod": { "client": { - "fullName": "google.ads.admanager_v1.LabelServiceClient", - "shortName": "LabelServiceClient" + "fullName": "google.ads.admanager_v1.NetworkServiceClient", + "shortName": "NetworkServiceClient" + }, + "fullName": "google.ads.admanager_v1.NetworkServiceClient.get_network", + "method": { + "fullName": "google.ads.admanager.v1.NetworkService.GetNetwork", + "service": { + "fullName": "google.ads.admanager.v1.NetworkService", + "shortName": "NetworkService" + }, + "shortName": "GetNetwork" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.admanager_v1.types.GetNetworkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ads.admanager_v1.types.Network", + "shortName": "get_network" + }, + "description": "Sample for GetNetwork", + "file": "admanager_v1_generated_network_service_get_network_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "admanager_v1_generated_NetworkService_GetNetwork_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "admanager_v1_generated_network_service_get_network_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ads.admanager_v1.NetworkServiceClient", + "shortName": "NetworkServiceClient" + }, + "fullName": "google.ads.admanager_v1.NetworkServiceClient.list_networks", + "method": { + "fullName": 
"google.ads.admanager.v1.NetworkService.ListNetworks", + "service": { + "fullName": "google.ads.admanager.v1.NetworkService", + "shortName": "NetworkService" + }, + "shortName": "ListNetworks" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.admanager_v1.types.ListNetworksRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ads.admanager_v1.types.ListNetworksResponse", + "shortName": "list_networks" + }, + "description": "Sample for ListNetworks", + "file": "admanager_v1_generated_network_service_list_networks_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "admanager_v1_generated_NetworkService_ListNetworks_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "admanager_v1_generated_network_service_list_networks_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ads.admanager_v1.OrderServiceClient", + "shortName": "OrderServiceClient" }, - "fullName": "google.ads.admanager_v1.LabelServiceClient.get_label", + "fullName": "google.ads.admanager_v1.OrderServiceClient.get_order", "method": { - "fullName": "google.ads.admanager.v1.LabelService.GetLabel", + "fullName": "google.ads.admanager.v1.OrderService.GetOrder", "service": { - "fullName": "google.ads.admanager.v1.LabelService", - "shortName": "LabelService" + "fullName": "google.ads.admanager.v1.OrderService", + "shortName": "OrderService" }, - "shortName": "GetLabel" + "shortName": 
"GetOrder" }, "parameters": [ { "name": "request", - "type": "google.ads.admanager_v1.types.GetLabelRequest" + "type": "google.ads.admanager_v1.types.GetOrderRequest" }, { "name": "name", @@ -1329,14 +1581,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.ads.admanager_v1.types.Label", - "shortName": "get_label" + "resultType": "google.ads.admanager_v1.types.Order", + "shortName": "get_order" }, - "description": "Sample for GetLabel", - "file": "admanager_v1_generated_label_service_get_label_sync.py", + "description": "Sample for GetOrder", + "file": "admanager_v1_generated_order_service_get_order_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_LabelService_GetLabel_sync", + "regionTag": "admanager_v1_generated_OrderService_GetOrder_sync", "segments": [ { "end": 51, @@ -1369,28 +1621,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_label_service_get_label_sync.py" + "title": "admanager_v1_generated_order_service_get_order_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.ads.admanager_v1.LabelServiceClient", - "shortName": "LabelServiceClient" + "fullName": "google.ads.admanager_v1.OrderServiceClient", + "shortName": "OrderServiceClient" }, - "fullName": "google.ads.admanager_v1.LabelServiceClient.list_labels", + "fullName": "google.ads.admanager_v1.OrderServiceClient.list_orders", "method": { - "fullName": "google.ads.admanager.v1.LabelService.ListLabels", + "fullName": "google.ads.admanager.v1.OrderService.ListOrders", "service": { - "fullName": "google.ads.admanager.v1.LabelService", - "shortName": "LabelService" + "fullName": "google.ads.admanager.v1.OrderService", + "shortName": "OrderService" }, - "shortName": "ListLabels" + "shortName": "ListOrders" }, "parameters": [ { "name": "request", - "type": "google.ads.admanager_v1.types.ListLabelsRequest" + "type": "google.ads.admanager_v1.types.ListOrdersRequest" }, { "name": "parent", 
@@ -1409,14 +1661,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.ads.admanager_v1.services.label_service.pagers.ListLabelsPager", - "shortName": "list_labels" + "resultType": "google.ads.admanager_v1.services.order_service.pagers.ListOrdersPager", + "shortName": "list_orders" }, - "description": "Sample for ListLabels", - "file": "admanager_v1_generated_label_service_list_labels_sync.py", + "description": "Sample for ListOrders", + "file": "admanager_v1_generated_order_service_list_orders_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_LabelService_ListLabels_sync", + "regionTag": "admanager_v1_generated_OrderService_ListOrders_sync", "segments": [ { "end": 52, @@ -1449,28 +1701,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_label_service_list_labels_sync.py" + "title": "admanager_v1_generated_order_service_list_orders_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.ads.admanager_v1.LineItemServiceClient", - "shortName": "LineItemServiceClient" + "fullName": "google.ads.admanager_v1.PlacementServiceClient", + "shortName": "PlacementServiceClient" }, - "fullName": "google.ads.admanager_v1.LineItemServiceClient.get_line_item", + "fullName": "google.ads.admanager_v1.PlacementServiceClient.get_placement", "method": { - "fullName": "google.ads.admanager.v1.LineItemService.GetLineItem", + "fullName": "google.ads.admanager.v1.PlacementService.GetPlacement", "service": { - "fullName": "google.ads.admanager.v1.LineItemService", - "shortName": "LineItemService" + "fullName": "google.ads.admanager.v1.PlacementService", + "shortName": "PlacementService" }, - "shortName": "GetLineItem" + "shortName": "GetPlacement" }, "parameters": [ { "name": "request", - "type": "google.ads.admanager_v1.types.GetLineItemRequest" + "type": "google.ads.admanager_v1.types.GetPlacementRequest" }, { "name": "name", @@ -1489,14 +1741,14 @@ "type": 
"Sequence[Tuple[str, str]" } ], - "resultType": "google.ads.admanager_v1.types.LineItem", - "shortName": "get_line_item" + "resultType": "google.ads.admanager_v1.types.Placement", + "shortName": "get_placement" }, - "description": "Sample for GetLineItem", - "file": "admanager_v1_generated_line_item_service_get_line_item_sync.py", + "description": "Sample for GetPlacement", + "file": "admanager_v1_generated_placement_service_get_placement_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_LineItemService_GetLineItem_sync", + "regionTag": "admanager_v1_generated_PlacementService_GetPlacement_sync", "segments": [ { "end": 51, @@ -1529,28 +1781,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_line_item_service_get_line_item_sync.py" + "title": "admanager_v1_generated_placement_service_get_placement_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.ads.admanager_v1.LineItemServiceClient", - "shortName": "LineItemServiceClient" + "fullName": "google.ads.admanager_v1.PlacementServiceClient", + "shortName": "PlacementServiceClient" }, - "fullName": "google.ads.admanager_v1.LineItemServiceClient.list_line_items", + "fullName": "google.ads.admanager_v1.PlacementServiceClient.list_placements", "method": { - "fullName": "google.ads.admanager.v1.LineItemService.ListLineItems", + "fullName": "google.ads.admanager.v1.PlacementService.ListPlacements", "service": { - "fullName": "google.ads.admanager.v1.LineItemService", - "shortName": "LineItemService" + "fullName": "google.ads.admanager.v1.PlacementService", + "shortName": "PlacementService" }, - "shortName": "ListLineItems" + "shortName": "ListPlacements" }, "parameters": [ { "name": "request", - "type": "google.ads.admanager_v1.types.ListLineItemsRequest" + "type": "google.ads.admanager_v1.types.ListPlacementsRequest" }, { "name": "parent", @@ -1569,14 +1821,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": 
"google.ads.admanager_v1.services.line_item_service.pagers.ListLineItemsPager", - "shortName": "list_line_items" + "resultType": "google.ads.admanager_v1.services.placement_service.pagers.ListPlacementsPager", + "shortName": "list_placements" }, - "description": "Sample for ListLineItems", - "file": "admanager_v1_generated_line_item_service_list_line_items_sync.py", + "description": "Sample for ListPlacements", + "file": "admanager_v1_generated_placement_service_list_placements_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_LineItemService_ListLineItems_sync", + "regionTag": "admanager_v1_generated_PlacementService_ListPlacements_sync", "segments": [ { "end": 52, @@ -1609,33 +1861,37 @@ "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_line_item_service_list_line_items_sync.py" + "title": "admanager_v1_generated_placement_service_list_placements_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.ads.admanager_v1.NetworkServiceClient", - "shortName": "NetworkServiceClient" + "fullName": "google.ads.admanager_v1.ReportServiceClient", + "shortName": "ReportServiceClient" }, - "fullName": "google.ads.admanager_v1.NetworkServiceClient.get_network", + "fullName": "google.ads.admanager_v1.ReportServiceClient.create_report", "method": { - "fullName": "google.ads.admanager.v1.NetworkService.GetNetwork", + "fullName": "google.ads.admanager.v1.ReportService.CreateReport", "service": { - "fullName": "google.ads.admanager.v1.NetworkService", - "shortName": "NetworkService" + "fullName": "google.ads.admanager.v1.ReportService", + "shortName": "ReportService" }, - "shortName": "GetNetwork" + "shortName": "CreateReport" }, "parameters": [ { "name": "request", - "type": "google.ads.admanager_v1.types.GetNetworkRequest" + "type": "google.ads.admanager_v1.types.CreateReportRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, + { + "name": "report", + "type": 
"google.ads.admanager_v1.types.Report" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -1649,22 +1905,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.ads.admanager_v1.types.Network", - "shortName": "get_network" + "resultType": "google.ads.admanager_v1.types.Report", + "shortName": "create_report" }, - "description": "Sample for GetNetwork", - "file": "admanager_v1_generated_network_service_get_network_sync.py", + "description": "Sample for CreateReport", + "file": "admanager_v1_generated_report_service_create_report_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_NetworkService_GetNetwork_sync", + "regionTag": "admanager_v1_generated_ReportService_CreateReport_sync", "segments": [ { - "end": 51, + "end": 57, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 57, "start": 27, "type": "SHORT" }, @@ -1674,43 +1930,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 51, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 54, + "start": 52, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 58, + "start": 55, "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_network_service_get_network_sync.py" + "title": "admanager_v1_generated_report_service_create_report_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.ads.admanager_v1.OrderServiceClient", - "shortName": "OrderServiceClient" + "fullName": "google.ads.admanager_v1.ReportServiceClient", + "shortName": "ReportServiceClient" }, - "fullName": "google.ads.admanager_v1.OrderServiceClient.get_order", + "fullName": "google.ads.admanager_v1.ReportServiceClient.fetch_report_result_rows", "method": { - "fullName": "google.ads.admanager.v1.OrderService.GetOrder", + "fullName": "google.ads.admanager.v1.ReportService.FetchReportResultRows", "service": { - "fullName": "google.ads.admanager.v1.OrderService", - 
"shortName": "OrderService" + "fullName": "google.ads.admanager.v1.ReportService", + "shortName": "ReportService" }, - "shortName": "GetOrder" + "shortName": "FetchReportResultRows" }, "parameters": [ { "name": "request", - "type": "google.ads.admanager_v1.types.GetOrderRequest" + "type": "google.ads.admanager_v1.types.FetchReportResultRowsRequest" }, { "name": "name", @@ -1729,14 +1985,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.ads.admanager_v1.types.Order", - "shortName": "get_order" + "resultType": "google.ads.admanager_v1.services.report_service.pagers.FetchReportResultRowsPager", + "shortName": "fetch_report_result_rows" }, - "description": "Sample for GetOrder", - "file": "admanager_v1_generated_order_service_get_order_sync.py", + "description": "Sample for FetchReportResultRows", + "file": "admanager_v1_generated_report_service_fetch_report_result_rows_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_OrderService_GetOrder_sync", + "regionTag": "admanager_v1_generated_ReportService_FetchReportResultRows_sync", "segments": [ { "end": 51, @@ -1754,46 +2010,46 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { "end": 52, - "start": 49, + "start": 48, "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_order_service_get_order_sync.py" + "title": "admanager_v1_generated_report_service_fetch_report_result_rows_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.ads.admanager_v1.OrderServiceClient", - "shortName": "OrderServiceClient" + "fullName": "google.ads.admanager_v1.ReportServiceClient", + "shortName": "ReportServiceClient" }, - "fullName": "google.ads.admanager_v1.OrderServiceClient.list_orders", + "fullName": "google.ads.admanager_v1.ReportServiceClient.get_report", "method": { - 
"fullName": "google.ads.admanager.v1.OrderService.ListOrders", + "fullName": "google.ads.admanager.v1.ReportService.GetReport", "service": { - "fullName": "google.ads.admanager.v1.OrderService", - "shortName": "OrderService" + "fullName": "google.ads.admanager.v1.ReportService", + "shortName": "ReportService" }, - "shortName": "ListOrders" + "shortName": "GetReport" }, "parameters": [ { "name": "request", - "type": "google.ads.admanager_v1.types.ListOrdersRequest" + "type": "google.ads.admanager_v1.types.GetReportRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -1809,22 +2065,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.ads.admanager_v1.services.order_service.pagers.ListOrdersPager", - "shortName": "list_orders" + "resultType": "google.ads.admanager_v1.types.Report", + "shortName": "get_report" }, - "description": "Sample for ListOrders", - "file": "admanager_v1_generated_order_service_list_orders_sync.py", + "description": "Sample for GetReport", + "file": "admanager_v1_generated_report_service_get_report_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_OrderService_ListOrders_sync", + "regionTag": "admanager_v1_generated_ReportService_GetReport_sync", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1844,36 +2100,36 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_order_service_list_orders_sync.py" + "title": "admanager_v1_generated_report_service_get_report_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.ads.admanager_v1.PlacementServiceClient", - "shortName": "PlacementServiceClient" + "fullName": "google.ads.admanager_v1.ReportServiceClient", + "shortName": "ReportServiceClient" }, - "fullName": 
"google.ads.admanager_v1.PlacementServiceClient.get_placement", + "fullName": "google.ads.admanager_v1.ReportServiceClient.list_reports", "method": { - "fullName": "google.ads.admanager.v1.PlacementService.GetPlacement", + "fullName": "google.ads.admanager.v1.ReportService.ListReports", "service": { - "fullName": "google.ads.admanager.v1.PlacementService", - "shortName": "PlacementService" + "fullName": "google.ads.admanager.v1.ReportService", + "shortName": "ReportService" }, - "shortName": "GetPlacement" + "shortName": "ListReports" }, "parameters": [ { "name": "request", - "type": "google.ads.admanager_v1.types.GetPlacementRequest" + "type": "google.ads.admanager_v1.types.ListReportsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -1889,22 +2145,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.ads.admanager_v1.types.Placement", - "shortName": "get_placement" + "resultType": "google.ads.admanager_v1.services.report_service.pagers.ListReportsPager", + "shortName": "list_reports" }, - "description": "Sample for GetPlacement", - "file": "admanager_v1_generated_placement_service_get_placement_sync.py", + "description": "Sample for ListReports", + "file": "admanager_v1_generated_report_service_list_reports_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_PlacementService_GetPlacement_sync", + "regionTag": "admanager_v1_generated_ReportService_ListReports_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -1924,36 +2180,36 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_placement_service_get_placement_sync.py" + "title": "admanager_v1_generated_report_service_list_reports_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": 
"google.ads.admanager_v1.PlacementServiceClient", - "shortName": "PlacementServiceClient" + "fullName": "google.ads.admanager_v1.ReportServiceClient", + "shortName": "ReportServiceClient" }, - "fullName": "google.ads.admanager_v1.PlacementServiceClient.list_placements", + "fullName": "google.ads.admanager_v1.ReportServiceClient.run_report", "method": { - "fullName": "google.ads.admanager.v1.PlacementService.ListPlacements", + "fullName": "google.ads.admanager.v1.ReportService.RunReport", "service": { - "fullName": "google.ads.admanager.v1.PlacementService", - "shortName": "PlacementService" + "fullName": "google.ads.admanager.v1.ReportService", + "shortName": "ReportService" }, - "shortName": "ListPlacements" + "shortName": "RunReport" }, "parameters": [ { "name": "request", - "type": "google.ads.admanager_v1.types.ListPlacementsRequest" + "type": "google.ads.admanager_v1.types.RunReportRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -1969,22 +2225,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.ads.admanager_v1.services.placement_service.pagers.ListPlacementsPager", - "shortName": "list_placements" + "resultType": "google.api_core.operation.Operation", + "shortName": "run_report" }, - "description": "Sample for ListPlacements", - "file": "admanager_v1_generated_placement_service_list_placements_sync.py", + "description": "Sample for RunReport", + "file": "admanager_v1_generated_report_service_run_report_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_PlacementService_ListPlacements_sync", + "regionTag": "admanager_v1_generated_ReportService_RunReport_sync", "segments": [ { - "end": 52, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1999,17 +2255,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 56, + 
"start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_placement_service_list_placements_sync.py" + "title": "admanager_v1_generated_report_service_run_report_sync.py" }, { "canonical": true, @@ -2018,23 +2274,27 @@ "fullName": "google.ads.admanager_v1.ReportServiceClient", "shortName": "ReportServiceClient" }, - "fullName": "google.ads.admanager_v1.ReportServiceClient.export_saved_report", + "fullName": "google.ads.admanager_v1.ReportServiceClient.update_report", "method": { - "fullName": "google.ads.admanager.v1.ReportService.ExportSavedReport", + "fullName": "google.ads.admanager.v1.ReportService.UpdateReport", "service": { "fullName": "google.ads.admanager.v1.ReportService", "shortName": "ReportService" }, - "shortName": "ExportSavedReport" + "shortName": "UpdateReport" }, "parameters": [ { "name": "request", - "type": "google.ads.admanager_v1.types.ExportSavedReportRequest" + "type": "google.ads.admanager_v1.types.UpdateReportRequest" }, { "name": "report", - "type": "str" + "type": "google.ads.admanager_v1.types.Report" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -2049,22 +2309,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "export_saved_report" + "resultType": "google.ads.admanager_v1.types.Report", + "shortName": "update_report" }, - "description": "Sample for ExportSavedReport", - "file": "admanager_v1_generated_report_service_export_saved_report_sync.py", + "description": "Sample for UpdateReport", + "file": "admanager_v1_generated_report_service_update_report_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_ReportService_ExportSavedReport_sync", + "regionTag": "admanager_v1_generated_ReportService_UpdateReport_sync", "segments": [ { - "end": 55, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 56, "start": 27, "type": "SHORT" 
}, @@ -2074,22 +2334,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_report_service_export_saved_report_sync.py" + "title": "admanager_v1_generated_report_service_update_report_sync.py" }, { "canonical": true, @@ -2255,22 +2515,22 @@ "canonical": true, "clientMethod": { "client": { - "fullName": "google.ads.admanager_v1.TeamServiceClient", - "shortName": "TeamServiceClient" + "fullName": "google.ads.admanager_v1.TaxonomyCategoryServiceClient", + "shortName": "TaxonomyCategoryServiceClient" }, - "fullName": "google.ads.admanager_v1.TeamServiceClient.get_team", + "fullName": "google.ads.admanager_v1.TaxonomyCategoryServiceClient.get_taxonomy_category", "method": { - "fullName": "google.ads.admanager.v1.TeamService.GetTeam", + "fullName": "google.ads.admanager.v1.TaxonomyCategoryService.GetTaxonomyCategory", "service": { - "fullName": "google.ads.admanager.v1.TeamService", - "shortName": "TeamService" + "fullName": "google.ads.admanager.v1.TaxonomyCategoryService", + "shortName": "TaxonomyCategoryService" }, - "shortName": "GetTeam" + "shortName": "GetTaxonomyCategory" }, "parameters": [ { "name": "request", - "type": "google.ads.admanager_v1.types.GetTeamRequest" + "type": "google.ads.admanager_v1.types.GetTaxonomyCategoryRequest" }, { "name": "name", @@ -2289,14 +2549,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.ads.admanager_v1.types.Team", - "shortName": "get_team" + "resultType": "google.ads.admanager_v1.types.TaxonomyCategory", + "shortName": "get_taxonomy_category" }, - "description": "Sample for GetTeam", - "file": "admanager_v1_generated_team_service_get_team_sync.py", + "description": "Sample for GetTaxonomyCategory", + "file": 
"admanager_v1_generated_taxonomy_category_service_get_taxonomy_category_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_TeamService_GetTeam_sync", + "regionTag": "admanager_v1_generated_TaxonomyCategoryService_GetTaxonomyCategory_sync", "segments": [ { "end": 51, @@ -2329,28 +2589,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_team_service_get_team_sync.py" + "title": "admanager_v1_generated_taxonomy_category_service_get_taxonomy_category_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.ads.admanager_v1.TeamServiceClient", - "shortName": "TeamServiceClient" + "fullName": "google.ads.admanager_v1.TaxonomyCategoryServiceClient", + "shortName": "TaxonomyCategoryServiceClient" }, - "fullName": "google.ads.admanager_v1.TeamServiceClient.list_teams", + "fullName": "google.ads.admanager_v1.TaxonomyCategoryServiceClient.list_taxonomy_categories", "method": { - "fullName": "google.ads.admanager.v1.TeamService.ListTeams", + "fullName": "google.ads.admanager.v1.TaxonomyCategoryService.ListTaxonomyCategories", "service": { - "fullName": "google.ads.admanager.v1.TeamService", - "shortName": "TeamService" + "fullName": "google.ads.admanager.v1.TaxonomyCategoryService", + "shortName": "TaxonomyCategoryService" }, - "shortName": "ListTeams" + "shortName": "ListTaxonomyCategories" }, "parameters": [ { "name": "request", - "type": "google.ads.admanager_v1.types.ListTeamsRequest" + "type": "google.ads.admanager_v1.types.ListTaxonomyCategoriesRequest" }, { "name": "parent", @@ -2369,14 +2629,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.ads.admanager_v1.services.team_service.pagers.ListTeamsPager", - "shortName": "list_teams" + "resultType": "google.ads.admanager_v1.services.taxonomy_category_service.pagers.ListTaxonomyCategoriesPager", + "shortName": "list_taxonomy_categories" }, - "description": "Sample for ListTeams", - "file": 
"admanager_v1_generated_team_service_list_teams_sync.py", + "description": "Sample for ListTaxonomyCategories", + "file": "admanager_v1_generated_taxonomy_category_service_list_taxonomy_categories_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_TeamService_ListTeams_sync", + "regionTag": "admanager_v1_generated_TaxonomyCategoryService_ListTaxonomyCategories_sync", "segments": [ { "end": 52, @@ -2409,7 +2669,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "admanager_v1_generated_team_service_list_teams_sync.py" + "title": "admanager_v1_generated_taxonomy_category_service_list_taxonomy_categories_sync.py" }, { "canonical": true, @@ -2490,86 +2750,6 @@ } ], "title": "admanager_v1_generated_user_service_get_user_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.ads.admanager_v1.UserServiceClient", - "shortName": "UserServiceClient" - }, - "fullName": "google.ads.admanager_v1.UserServiceClient.list_users", - "method": { - "fullName": "google.ads.admanager.v1.UserService.ListUsers", - "service": { - "fullName": "google.ads.admanager.v1.UserService", - "shortName": "UserService" - }, - "shortName": "ListUsers" - }, - "parameters": [ - { - "name": "request", - "type": "google.ads.admanager_v1.types.ListUsersRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.ads.admanager_v1.services.user_service.pagers.ListUsersPager", - "shortName": "list_users" - }, - "description": "Sample for ListUsers", - "file": "admanager_v1_generated_user_service_list_users_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "admanager_v1_generated_UserService_ListUsers_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 
52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "admanager_v1_generated_user_service_list_users_sync.py" } ] } diff --git a/packages/google-ads-admanager/scripts/client-post-processing/doc-formatting.yaml b/packages/google-ads-admanager/scripts/client-post-processing/doc-formatting.yaml new file mode 120000 index 000000000000..6e0991666f97 --- /dev/null +++ b/packages/google-ads-admanager/scripts/client-post-processing/doc-formatting.yaml @@ -0,0 +1 @@ +../../../../scripts/client-post-processing/doc-formatting.yaml \ No newline at end of file diff --git a/packages/google-ads-admanager/scripts/fixup_admanager_v1_keywords.py b/packages/google-ads-admanager/scripts/fixup_admanager_v1_keywords.py index f8aeaf3c31f3..72c4b05e14f3 100644 --- a/packages/google-ads-admanager/scripts/fixup_admanager_v1_keywords.py +++ b/packages/google-ads-admanager/scripts/fixup_admanager_v1_keywords.py @@ -39,38 +39,40 @@ def partition( class admanagerCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'export_saved_report': ('format_', 'report', 'include_report_properties', 'include_ids', 'include_totals_row', 'file_name', ), - 'get_ad_partner': ('name', ), + 'batch_create_entity_signals_mappings': ('parent', 'requests', ), + 'batch_update_entity_signals_mappings': ('parent', 'requests', ), + 'create_entity_signals_mapping': ('parent', 'entity_signals_mapping', ), + 'create_report': ('parent', 'report', ), + 'fetch_report_result_rows': ('name', 'page_size', 'page_token', ), 'get_ad_unit': ('name', ), 'get_company': ('name', ), - 'get_contact': ('name', ), - 'get_creative': ('name', ), 'get_custom_field': ('name', ), 
'get_custom_targeting_key': ('name', ), 'get_custom_targeting_value': ('name', ), - 'get_label': ('name', ), - 'get_line_item': ('name', ), + 'get_entity_signals_mapping': ('name', ), 'get_network': ('name', ), 'get_order': ('name', ), 'get_placement': ('name', ), + 'get_report': ('name', ), 'get_role': ('name', ), - 'get_team': ('name', ), + 'get_taxonomy_category': ('name', ), 'get_user': ('name', ), - 'list_ad_partners': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'skip', ), 'list_ad_units': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'skip', ), + 'list_ad_unit_sizes': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'skip', ), 'list_companies': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'skip', ), - 'list_contacts': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'skip', ), - 'list_creatives': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'skip', ), 'list_custom_fields': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'skip', ), 'list_custom_targeting_keys': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'skip', ), 'list_custom_targeting_values': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'skip', ), - 'list_labels': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'skip', ), - 'list_line_items': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'skip', ), + 'list_entity_signals_mappings': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'skip', ), + 'list_networks': (), 'list_orders': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'skip', ), 'list_placements': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'skip', ), + 'list_reports': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'skip', ), 'list_roles': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'skip', ), - 'list_teams': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'skip', ), - 
'list_users': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'skip', ), + 'list_taxonomy_categories': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'skip', ), + 'run_report': ('name', ), + 'update_entity_signals_mapping': ('entity_signals_mapping', 'update_mask', ), + 'update_report': ('report', 'update_mask', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_ad_unit_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_ad_unit_service.py index 4aef4911e6dd..6f4aca46282b 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_ad_unit_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_ad_unit_service.py @@ -53,8 +53,8 @@ ) from google.ads.admanager_v1.types import ( ad_unit_enums, + ad_unit_messages, ad_unit_service, - ad_unit_size, applied_label, ) @@ -975,23 +975,23 @@ def test_get_ad_unit_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = ad_unit_service.AdUnit( + return_value = ad_unit_messages.AdUnit( name="name_value", ad_unit_id=1040, parent_ad_unit="parent_ad_unit_value", display_name="display_name_value", ad_unit_code="ad_unit_code_value", - status=ad_unit_service.AdUnit.Status.ACTIVE, - target_window=ad_unit_service.TargetWindowEnum.TargetWindow.TOP, + status=ad_unit_enums.AdUnitStatusEnum.AdUnitStatus.ACTIVE, + applied_target_window=ad_unit_enums.TargetWindowEnum.TargetWindow.TOP, + effective_target_window=ad_unit_enums.TargetWindowEnum.TargetWindow.TOP, applied_teams=["applied_teams_value"], teams=["teams_value"], description="description_value", explicitly_targeted=True, has_children=True, external_set_top_box_channel_id="external_set_top_box_channel_id_value", - ctv_application_id=1900, - smart_size_mode=ad_unit_service.SmartSizeModeEnum.SmartSizeMode.NONE, - applied_adsense_enabled=ad_unit_enums.AppliedAdsenseEnabledEnum.AppliedAdsenseEnabled.TRUE, + smart_size_mode=ad_unit_enums.SmartSizeModeEnum.SmartSizeMode.NONE, + applied_adsense_enabled=True, effective_adsense_enabled=True, ) @@ -999,7 +999,7 @@ def test_get_ad_unit_rest(request_type): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = ad_unit_service.AdUnit.pb(return_value) + return_value = ad_unit_messages.AdUnit.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") @@ -1007,14 +1007,21 @@ def test_get_ad_unit_rest(request_type): response = client.get_ad_unit(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, ad_unit_service.AdUnit) + assert isinstance(response, ad_unit_messages.AdUnit) assert response.name == "name_value" assert response.ad_unit_id == 1040 assert response.parent_ad_unit == "parent_ad_unit_value" assert response.display_name == "display_name_value" assert response.ad_unit_code == "ad_unit_code_value" - assert response.status == ad_unit_service.AdUnit.Status.ACTIVE - assert response.target_window == ad_unit_service.TargetWindowEnum.TargetWindow.TOP + assert response.status == ad_unit_enums.AdUnitStatusEnum.AdUnitStatus.ACTIVE + assert ( + response.applied_target_window + == ad_unit_enums.TargetWindowEnum.TargetWindow.TOP + ) + assert ( + response.effective_target_window + == ad_unit_enums.TargetWindowEnum.TargetWindow.TOP + ) assert response.applied_teams == ["applied_teams_value"] assert response.teams == ["teams_value"] assert response.description == "description_value" @@ -1024,14 +1031,10 @@ def test_get_ad_unit_rest(request_type): response.external_set_top_box_channel_id == "external_set_top_box_channel_id_value" ) - assert response.ctv_application_id == 1900 - assert ( - response.smart_size_mode == ad_unit_service.SmartSizeModeEnum.SmartSizeMode.NONE - ) assert ( - response.applied_adsense_enabled - == ad_unit_enums.AppliedAdsenseEnabledEnum.AppliedAdsenseEnabled.TRUE + response.smart_size_mode == ad_unit_enums.SmartSizeModeEnum.SmartSizeMode.NONE ) + assert response.applied_adsense_enabled is True assert response.effective_adsense_enabled is True @@ -1111,7 +1114,7 @@ def test_get_ad_unit_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = ad_unit_service.AdUnit() + return_value = ad_unit_messages.AdUnit() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -1132,7 +1135,7 @@ def test_get_ad_unit_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = ad_unit_service.AdUnit.pb(return_value) + return_value = ad_unit_messages.AdUnit.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") @@ -1187,8 +1190,8 @@ def test_get_ad_unit_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ad_unit_service.AdUnit.to_json( - ad_unit_service.AdUnit() + req.return_value._content = ad_unit_messages.AdUnit.to_json( + ad_unit_messages.AdUnit() ) request = ad_unit_service.GetAdUnitRequest() @@ -1197,7 +1200,7 @@ def test_get_ad_unit_rest_interceptors(null_interceptor): ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = ad_unit_service.AdUnit() + post.return_value = ad_unit_messages.AdUnit() client.get_ad_unit( request, @@ -1244,7 +1247,7 @@ def test_get_ad_unit_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = ad_unit_service.AdUnit() + return_value = ad_unit_messages.AdUnit() # get arguments that satisfy an http rule for this method sample_request = {"name": "networks/sample1/adUnits/sample2"} @@ -1259,7 +1262,7 @@ def test_get_ad_unit_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = ad_unit_service.AdUnit.pb(return_value) + return_value = ad_unit_messages.AdUnit.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1628,9 +1631,9 @@ def test_list_ad_units_rest_pager(transport: str = "rest"): response = ( ad_unit_service.ListAdUnitsResponse( ad_units=[ - ad_unit_service.AdUnit(), - ad_unit_service.AdUnit(), - ad_unit_service.AdUnit(), + ad_unit_messages.AdUnit(), + ad_unit_messages.AdUnit(), + ad_unit_messages.AdUnit(), ], next_page_token="abc", ), @@ -1640,14 +1643,14 @@ def test_list_ad_units_rest_pager(transport: str = "rest"): ), ad_unit_service.ListAdUnitsResponse( ad_units=[ - ad_unit_service.AdUnit(), + ad_unit_messages.AdUnit(), ], next_page_token="ghi", ), ad_unit_service.ListAdUnitsResponse( ad_units=[ - ad_unit_service.AdUnit(), - ad_unit_service.AdUnit(), + ad_unit_messages.AdUnit(), + ad_unit_messages.AdUnit(), ], ), ) @@ -1670,13 +1673,398 @@ def test_list_ad_units_rest_pager(transport: str = "rest"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, ad_unit_service.AdUnit) for i in results) + assert all(isinstance(i, ad_unit_messages.AdUnit) for i in results) pages = list(client.list_ad_units(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token +@pytest.mark.parametrize( + "request_type", + [ + ad_unit_service.ListAdUnitSizesRequest, + dict, + ], +) +def test_list_ad_unit_sizes_rest(request_type): + client = AdUnitServiceClient( 
+ credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "networks/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = ad_unit_service.ListAdUnitSizesResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = ad_unit_service.ListAdUnitSizesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_ad_unit_sizes(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListAdUnitSizesPager) + assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 + + +def test_list_ad_unit_sizes_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AdUnitServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_ad_unit_sizes in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_ad_unit_sizes + ] = mock_rpc + + request = {} + client.list_ad_unit_sizes(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_ad_unit_sizes(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_ad_unit_sizes_rest_required_fields( + request_type=ad_unit_service.ListAdUnitSizesRequest, +): + transport_class = transports.AdUnitServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_ad_unit_sizes._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_ad_unit_sizes._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + "skip", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = AdUnitServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = ad_unit_service.ListAdUnitSizesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = ad_unit_service.ListAdUnitSizesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_ad_unit_sizes(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_ad_unit_sizes_rest_unset_required_fields(): + transport = transports.AdUnitServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_ad_unit_sizes._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + "skip", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_ad_unit_sizes_rest_interceptors(null_interceptor): + transport = transports.AdUnitServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AdUnitServiceRestInterceptor(), + ) + client = AdUnitServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as 
req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AdUnitServiceRestInterceptor, "post_list_ad_unit_sizes" + ) as post, mock.patch.object( + transports.AdUnitServiceRestInterceptor, "pre_list_ad_unit_sizes" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = ad_unit_service.ListAdUnitSizesRequest.pb( + ad_unit_service.ListAdUnitSizesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ad_unit_service.ListAdUnitSizesResponse.to_json( + ad_unit_service.ListAdUnitSizesResponse() + ) + + request = ad_unit_service.ListAdUnitSizesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ad_unit_service.ListAdUnitSizesResponse() + + client.list_ad_unit_sizes( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_ad_unit_sizes_rest_bad_request( + transport: str = "rest", request_type=ad_unit_service.ListAdUnitSizesRequest +): + client = AdUnitServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "networks/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_ad_unit_sizes(request) + + +def test_list_ad_unit_sizes_rest_flattened(): + client = AdUnitServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = ad_unit_service.ListAdUnitSizesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "networks/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = ad_unit_service.ListAdUnitSizesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_ad_unit_sizes(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=networks/*}/adUnitSizes" % client.transport._host, args[1] + ) + + +def test_list_ad_unit_sizes_rest_flattened_error(transport: str = "rest"): + client = AdUnitServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_ad_unit_sizes( + ad_unit_service.ListAdUnitSizesRequest(), + parent="parent_value", + ) + + +def test_list_ad_unit_sizes_rest_pager(transport: str = "rest"): + client = AdUnitServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + ad_unit_service.ListAdUnitSizesResponse( + ad_unit_sizes=[ + ad_unit_messages.AdUnitSize(), + ad_unit_messages.AdUnitSize(), + ad_unit_messages.AdUnitSize(), + ], + next_page_token="abc", + ), + ad_unit_service.ListAdUnitSizesResponse( + ad_unit_sizes=[], + next_page_token="def", + ), + ad_unit_service.ListAdUnitSizesResponse( + ad_unit_sizes=[ + ad_unit_messages.AdUnitSize(), + ], + next_page_token="ghi", + ), + ad_unit_service.ListAdUnitSizesResponse( + ad_unit_sizes=[ + ad_unit_messages.AdUnitSize(), + ad_unit_messages.AdUnitSize(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + ad_unit_service.ListAdUnitSizesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "networks/sample1"} + + pager = client.list_ad_unit_sizes(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, ad_unit_messages.AdUnitSize) for i in results) + + pages = list(client.list_ad_unit_sizes(request=sample_request).pages) + for page_, token in zip(pages, ["abc", 
"def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.AdUnitServiceRestTransport( @@ -1789,6 +2177,7 @@ def test_ad_unit_service_base_transport(): methods = ( "get_ad_unit", "list_ad_units", + "list_ad_unit_sizes", "get_operation", ) for method in methods: @@ -1927,6 +2316,9 @@ def test_ad_unit_service_client_transport_session_collision(transport_name): session1 = client1.transport.list_ad_units._session session2 = client2.transport.list_ad_units._session assert session1 != session2 + session1 = client1.transport.list_ad_unit_sizes._session + session2 = client2.transport.list_ad_unit_sizes._session + assert session1 != session2 def test_ad_unit_path(): @@ -2154,7 +2546,7 @@ def test_get_operation_rest_bad_request( request = request_type() request = json_format.ParseDict( - {"name": "networks/sample1/operations/reports/exports/sample2"}, request + {"name": "networks/sample1/operations/reports/runs/sample2"}, request ) # Mock the http request call within the method and fake a BadRequest error. @@ -2181,7 +2573,7 @@ def test_get_operation_rest(request_type): credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {"name": "networks/sample1/operations/reports/exports/sample2"} + request_init = {"name": "networks/sample1/operations/reports/runs/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_company_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_company_service.py index 817775eba7e3..c64524fa3284 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_company_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_company_service.py @@ -36,6 +36,7 @@ from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore import grpc from grpc.experimental import aio from proto.marshal.rules import wrappers @@ -52,6 +53,7 @@ from google.ads.admanager_v1.types import ( applied_label, company_credit_status_enum, + company_messages, company_service, company_type_enum, ) @@ -977,7 +979,7 @@ def test_get_company_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = company_service.Company( + return_value = company_messages.Company( name="name_value", company_id=1059, display_name="display_name_value", @@ -991,13 +993,14 @@ def test_get_company_rest(request_type): credit_status=company_credit_status_enum.CompanyCreditStatusEnum.CompanyCreditStatus.ACTIVE, primary_contact="primary_contact_value", applied_teams=["applied_teams_value"], + third_party_company_id=2348, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = company_service.Company.pb(return_value) + return_value = company_messages.Company.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") @@ -1005,7 +1008,7 @@ def test_get_company_rest(request_type): response = client.get_company(request) # Establish that the response is the type that we expect. - assert isinstance(response, company_service.Company) + assert isinstance(response, company_messages.Company) assert response.name == "name_value" assert response.company_id == 1059 assert response.display_name == "display_name_value" @@ -1022,6 +1025,7 @@ def test_get_company_rest(request_type): ) assert response.primary_contact == "primary_contact_value" assert response.applied_teams == ["applied_teams_value"] + assert response.third_party_company_id == 2348 def test_get_company_rest_use_cached_wrapped_rpc(): @@ -1100,7 +1104,7 @@ def test_get_company_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = company_service.Company() + return_value = company_messages.Company() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -1121,7 +1125,7 @@ def test_get_company_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = company_service.Company.pb(return_value) + return_value = company_messages.Company.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") @@ -1176,8 +1180,8 @@ def test_get_company_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = company_service.Company.to_json( - company_service.Company() + req.return_value._content = company_messages.Company.to_json( + company_messages.Company() ) request = company_service.GetCompanyRequest() @@ -1186,7 +1190,7 @@ def test_get_company_rest_interceptors(null_interceptor): ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = company_service.Company() + post.return_value = company_messages.Company() client.get_company( request, @@ -1233,7 +1237,7 @@ def test_get_company_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = company_service.Company() + return_value = company_messages.Company() # get arguments that satisfy an http rule for this method sample_request = {"name": "networks/sample1/companies/sample2"} @@ -1248,7 +1252,7 @@ def test_get_company_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = company_service.Company.pb(return_value) + return_value = company_messages.Company.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1617,9 +1621,9 @@ def test_list_companies_rest_pager(transport: str = "rest"): response = ( company_service.ListCompaniesResponse( companies=[ - company_service.Company(), - company_service.Company(), - company_service.Company(), + company_messages.Company(), + company_messages.Company(), + company_messages.Company(), ], next_page_token="abc", ), @@ -1629,14 +1633,14 @@ def test_list_companies_rest_pager(transport: str = "rest"): ), company_service.ListCompaniesResponse( companies=[ - company_service.Company(), + company_messages.Company(), ], next_page_token="ghi", ), company_service.ListCompaniesResponse( companies=[ - company_service.Company(), - company_service.Company(), + company_messages.Company(), + company_messages.Company(), ], ), ) @@ -1659,7 +1663,7 @@ def test_list_companies_rest_pager(transport: str = "rest"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, company_service.Company) for i in results) + assert all(isinstance(i, company_messages.Company) for i in results) pages = list(client.list_companies(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): @@ -2166,7 +2170,7 @@ def test_get_operation_rest_bad_request( request = request_type() request = json_format.ParseDict( - {"name": "networks/sample1/operations/reports/exports/sample2"}, request + 
{"name": "networks/sample1/operations/reports/runs/sample2"}, request ) # Mock the http request call within the method and fake a BadRequest error. @@ -2193,7 +2197,7 @@ def test_get_operation_rest(request_type): credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {"name": "networks/sample1/operations/reports/exports/sample2"} + request_init = {"name": "networks/sample1/operations/reports/runs/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_field_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_field_service.py index 422c0de35f80..68e7487bb7ca 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_field_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_field_service.py @@ -49,7 +49,11 @@ pagers, transports, ) -from google.ads.admanager_v1.types import custom_field_enums, custom_field_service +from google.ads.admanager_v1.types import ( + custom_field_enums, + custom_field_messages, + custom_field_service, +) def client_cert_source_callback(): @@ -1004,7 +1008,7 @@ def test_get_custom_field_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = custom_field_service.CustomField( + return_value = custom_field_messages.CustomField( name="name_value", custom_field_id=1578, display_name="display_name_value", @@ -1019,7 +1023,7 @@ def test_get_custom_field_rest(request_type): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = custom_field_service.CustomField.pb(return_value) + return_value = custom_field_messages.CustomField.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") @@ -1027,7 +1031,7 @@ def test_get_custom_field_rest(request_type): response = client.get_custom_field(request) # Establish that the response is the type that we expect. - assert isinstance(response, custom_field_service.CustomField) + assert isinstance(response, custom_field_messages.CustomField) assert response.name == "name_value" assert response.custom_field_id == 1578 assert response.display_name == "display_name_value" @@ -1128,7 +1132,7 @@ def test_get_custom_field_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = custom_field_service.CustomField() + return_value = custom_field_messages.CustomField() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -1149,7 +1153,7 @@ def test_get_custom_field_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = custom_field_service.CustomField.pb(return_value) + return_value = custom_field_messages.CustomField.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") @@ -1204,8 +1208,8 @@ def test_get_custom_field_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = custom_field_service.CustomField.to_json( - custom_field_service.CustomField() + req.return_value._content = custom_field_messages.CustomField.to_json( + custom_field_messages.CustomField() ) request = custom_field_service.GetCustomFieldRequest() @@ -1214,7 +1218,7 @@ def test_get_custom_field_rest_interceptors(null_interceptor): ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = custom_field_service.CustomField() + post.return_value = custom_field_messages.CustomField() client.get_custom_field( request, @@ -1261,7 +1265,7 @@ def test_get_custom_field_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = custom_field_service.CustomField() + return_value = custom_field_messages.CustomField() # get arguments that satisfy an http rule for this method sample_request = {"name": "networks/sample1/customFields/sample2"} @@ -1276,7 +1280,7 @@ def test_get_custom_field_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = custom_field_service.CustomField.pb(return_value) + return_value = custom_field_messages.CustomField.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1653,9 +1657,9 @@ def test_list_custom_fields_rest_pager(transport: str = "rest"): response = ( custom_field_service.ListCustomFieldsResponse( custom_fields=[ - custom_field_service.CustomField(), - custom_field_service.CustomField(), - custom_field_service.CustomField(), + custom_field_messages.CustomField(), + custom_field_messages.CustomField(), + custom_field_messages.CustomField(), ], next_page_token="abc", ), @@ -1665,14 +1669,14 @@ def test_list_custom_fields_rest_pager(transport: str = "rest"): ), custom_field_service.ListCustomFieldsResponse( custom_fields=[ - custom_field_service.CustomField(), + custom_field_messages.CustomField(), ], next_page_token="ghi", ), custom_field_service.ListCustomFieldsResponse( custom_fields=[ - custom_field_service.CustomField(), - custom_field_service.CustomField(), + custom_field_messages.CustomField(), + custom_field_messages.CustomField(), ], ), ) @@ -1695,7 +1699,7 @@ def test_list_custom_fields_rest_pager(transport: str = "rest"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, custom_field_service.CustomField) for i in results) + assert all(isinstance(i, custom_field_messages.CustomField) for i in results) pages = list(client.list_custom_fields(request=sample_request).pages) for page_, token in zip(pages, ["abc", 
"def", "ghi", ""]): @@ -2133,7 +2137,7 @@ def test_get_operation_rest_bad_request( request = request_type() request = json_format.ParseDict( - {"name": "networks/sample1/operations/reports/exports/sample2"}, request + {"name": "networks/sample1/operations/reports/runs/sample2"}, request ) # Mock the http request call within the method and fake a BadRequest error. @@ -2160,7 +2164,7 @@ def test_get_operation_rest(request_type): credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {"name": "networks/sample1/operations/reports/exports/sample2"} + request_init = {"name": "networks/sample1/operations/reports/runs/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_targeting_key_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_targeting_key_service.py index 552cd9447d1c..bf29a7231ef8 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_targeting_key_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_targeting_key_service.py @@ -51,6 +51,7 @@ ) from google.ads.admanager_v1.types import ( custom_targeting_key_enums, + custom_targeting_key_messages, custom_targeting_key_service, ) @@ -1035,7 +1036,7 @@ def test_get_custom_targeting_key_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = custom_targeting_key_service.CustomTargetingKey( + return_value = custom_targeting_key_messages.CustomTargetingKey( name="name_value", custom_targeting_key_id=2451, ad_tag_name="ad_tag_name_value", @@ -1049,7 +1050,7 @@ def test_get_custom_targeting_key_rest(request_type): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = custom_targeting_key_service.CustomTargetingKey.pb(return_value) + return_value = custom_targeting_key_messages.CustomTargetingKey.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") @@ -1057,7 +1058,7 @@ def test_get_custom_targeting_key_rest(request_type): response = client.get_custom_targeting_key(request) # Establish that the response is the type that we expect. - assert isinstance(response, custom_targeting_key_service.CustomTargetingKey) + assert isinstance(response, custom_targeting_key_messages.CustomTargetingKey) assert response.name == "name_value" assert response.custom_targeting_key_id == 2451 assert response.ad_tag_name == "ad_tag_name_value" @@ -1157,7 +1158,7 @@ def test_get_custom_targeting_key_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = custom_targeting_key_service.CustomTargetingKey() + return_value = custom_targeting_key_messages.CustomTargetingKey() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -1178,7 +1179,7 @@ def test_get_custom_targeting_key_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = custom_targeting_key_service.CustomTargetingKey.pb( + return_value = custom_targeting_key_messages.CustomTargetingKey.pb( return_value ) json_return_value = json_format.MessageToJson(return_value) @@ -1238,8 +1239,8 @@ def test_get_custom_targeting_key_rest_interceptors(null_interceptor): req.return_value.status_code = 200 req.return_value.request = PreparedRequest() req.return_value._content = ( - custom_targeting_key_service.CustomTargetingKey.to_json( - custom_targeting_key_service.CustomTargetingKey() + custom_targeting_key_messages.CustomTargetingKey.to_json( + custom_targeting_key_messages.CustomTargetingKey() ) ) @@ -1249,7 +1250,7 @@ def test_get_custom_targeting_key_rest_interceptors(null_interceptor): ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = custom_targeting_key_service.CustomTargetingKey() + post.return_value = custom_targeting_key_messages.CustomTargetingKey() client.get_custom_targeting_key( request, @@ -1297,7 +1298,7 @@ def test_get_custom_targeting_key_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = custom_targeting_key_service.CustomTargetingKey() + return_value = custom_targeting_key_messages.CustomTargetingKey() # get arguments that satisfy an http rule for this method sample_request = {"name": "networks/sample1/customTargetingKeys/sample2"} @@ -1312,7 +1313,7 @@ def test_get_custom_targeting_key_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = custom_targeting_key_service.CustomTargetingKey.pb(return_value) + return_value = custom_targeting_key_messages.CustomTargetingKey.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1703,9 +1704,9 @@ def test_list_custom_targeting_keys_rest_pager(transport: str = "rest"): response = ( custom_targeting_key_service.ListCustomTargetingKeysResponse( custom_targeting_keys=[ - custom_targeting_key_service.CustomTargetingKey(), - custom_targeting_key_service.CustomTargetingKey(), - custom_targeting_key_service.CustomTargetingKey(), + custom_targeting_key_messages.CustomTargetingKey(), + custom_targeting_key_messages.CustomTargetingKey(), + custom_targeting_key_messages.CustomTargetingKey(), ], next_page_token="abc", ), @@ -1715,14 +1716,14 @@ def test_list_custom_targeting_keys_rest_pager(transport: str = "rest"): ), custom_targeting_key_service.ListCustomTargetingKeysResponse( custom_targeting_keys=[ - custom_targeting_key_service.CustomTargetingKey(), + custom_targeting_key_messages.CustomTargetingKey(), ], next_page_token="ghi", ), custom_targeting_key_service.ListCustomTargetingKeysResponse( custom_targeting_keys=[ - custom_targeting_key_service.CustomTargetingKey(), - custom_targeting_key_service.CustomTargetingKey(), + custom_targeting_key_messages.CustomTargetingKey(), + custom_targeting_key_messages.CustomTargetingKey(), ], ), ) @@ -1747,7 +1748,7 @@ def 
test_list_custom_targeting_keys_rest_pager(transport: str = "rest"): results = list(pager) assert len(results) == 6 assert all( - isinstance(i, custom_targeting_key_service.CustomTargetingKey) + isinstance(i, custom_targeting_key_messages.CustomTargetingKey) for i in results ) @@ -2195,7 +2196,7 @@ def test_get_operation_rest_bad_request( request = request_type() request = json_format.ParseDict( - {"name": "networks/sample1/operations/reports/exports/sample2"}, request + {"name": "networks/sample1/operations/reports/runs/sample2"}, request ) # Mock the http request call within the method and fake a BadRequest error. @@ -2222,7 +2223,7 @@ def test_get_operation_rest(request_type): credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {"name": "networks/sample1/operations/reports/exports/sample2"} + request_init = {"name": "networks/sample1/operations/reports/runs/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_targeting_value_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_targeting_value_service.py index f3836087467d..a6ac047ff983 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_targeting_value_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_targeting_value_service.py @@ -51,6 +51,7 @@ ) from google.ads.admanager_v1.types import ( custom_targeting_value_enums, + custom_targeting_value_messages, custom_targeting_value_service, ) @@ -1041,7 +1042,7 @@ def test_get_custom_targeting_value_rest(request_type): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = custom_targeting_value_service.CustomTargetingValue( + return_value = custom_targeting_value_messages.CustomTargetingValue( name="name_value", ad_tag_name="ad_tag_name_value", display_name="display_name_value", @@ -1053,7 +1054,7 @@ def test_get_custom_targeting_value_rest(request_type): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = custom_targeting_value_service.CustomTargetingValue.pb( + return_value = custom_targeting_value_messages.CustomTargetingValue.pb( return_value ) json_return_value = json_format.MessageToJson(return_value) @@ -1063,7 +1064,7 @@ def test_get_custom_targeting_value_rest(request_type): response = client.get_custom_targeting_value(request) # Establish that the response is the type that we expect. - assert isinstance(response, custom_targeting_value_service.CustomTargetingValue) + assert isinstance(response, custom_targeting_value_messages.CustomTargetingValue) assert response.name == "name_value" assert response.ad_tag_name == "ad_tag_name_value" assert response.display_name == "display_name_value" @@ -1158,7 +1159,7 @@ def test_get_custom_targeting_value_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = custom_targeting_value_service.CustomTargetingValue() + return_value = custom_targeting_value_messages.CustomTargetingValue() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -1179,7 +1180,7 @@ def test_get_custom_targeting_value_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = custom_targeting_value_service.CustomTargetingValue.pb( + return_value = custom_targeting_value_messages.CustomTargetingValue.pb( return_value ) json_return_value = json_format.MessageToJson(return_value) @@ -1239,8 +1240,8 @@ def test_get_custom_targeting_value_rest_interceptors(null_interceptor): req.return_value.status_code = 200 req.return_value.request = PreparedRequest() req.return_value._content = ( - custom_targeting_value_service.CustomTargetingValue.to_json( - custom_targeting_value_service.CustomTargetingValue() + custom_targeting_value_messages.CustomTargetingValue.to_json( + custom_targeting_value_messages.CustomTargetingValue() ) ) @@ -1250,7 +1251,7 @@ def test_get_custom_targeting_value_rest_interceptors(null_interceptor): ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = custom_targeting_value_service.CustomTargetingValue() + post.return_value = custom_targeting_value_messages.CustomTargetingValue() client.get_custom_targeting_value( request, @@ -1300,7 +1301,7 @@ def test_get_custom_targeting_value_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = custom_targeting_value_service.CustomTargetingValue() + return_value = custom_targeting_value_messages.CustomTargetingValue() # get arguments that satisfy an http rule for this method sample_request = { @@ -1317,7 +1318,7 @@ def test_get_custom_targeting_value_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = custom_targeting_value_service.CustomTargetingValue.pb( + return_value = custom_targeting_value_messages.CustomTargetingValue.pb( return_value ) json_return_value = json_format.MessageToJson(return_value) @@ -1718,9 +1719,9 @@ def test_list_custom_targeting_values_rest_pager(transport: str = "rest"): response = ( custom_targeting_value_service.ListCustomTargetingValuesResponse( custom_targeting_values=[ - custom_targeting_value_service.CustomTargetingValue(), - custom_targeting_value_service.CustomTargetingValue(), - custom_targeting_value_service.CustomTargetingValue(), + custom_targeting_value_messages.CustomTargetingValue(), + custom_targeting_value_messages.CustomTargetingValue(), + custom_targeting_value_messages.CustomTargetingValue(), ], next_page_token="abc", ), @@ -1730,14 +1731,14 @@ def test_list_custom_targeting_values_rest_pager(transport: str = "rest"): ), custom_targeting_value_service.ListCustomTargetingValuesResponse( custom_targeting_values=[ - custom_targeting_value_service.CustomTargetingValue(), + custom_targeting_value_messages.CustomTargetingValue(), ], next_page_token="ghi", ), custom_targeting_value_service.ListCustomTargetingValuesResponse( custom_targeting_values=[ - custom_targeting_value_service.CustomTargetingValue(), - custom_targeting_value_service.CustomTargetingValue(), + custom_targeting_value_messages.CustomTargetingValue(), + custom_targeting_value_messages.CustomTargetingValue(), ], ), ) @@ -1762,7 +1763,7 @@ def test_list_custom_targeting_values_rest_pager(transport: str = "rest"): results = list(pager) assert 
len(results) == 6 assert all( - isinstance(i, custom_targeting_value_service.CustomTargetingValue) + isinstance(i, custom_targeting_value_messages.CustomTargetingValue) for i in results ) @@ -2218,7 +2219,7 @@ def test_get_operation_rest_bad_request( request = request_type() request = json_format.ParseDict( - {"name": "networks/sample1/operations/reports/exports/sample2"}, request + {"name": "networks/sample1/operations/reports/runs/sample2"}, request ) # Mock the http request call within the method and fake a BadRequest error. @@ -2245,7 +2246,7 @@ def test_get_operation_rest(request_type): credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {"name": "networks/sample1/operations/reports/exports/sample2"} + request_init = {"name": "networks/sample1/operations/reports/runs/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_entity_signals_mapping_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_entity_signals_mapping_service.py new file mode 100644 index 000000000000..1fb7a9de78ae --- /dev/null +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_entity_signals_mapping_service.py @@ -0,0 +1,3898 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.ads.admanager_v1.services.entity_signals_mapping_service import ( + EntitySignalsMappingServiceClient, + pagers, + transports, +) +from google.ads.admanager_v1.types import ( + entity_signals_mapping_messages, + entity_signals_mapping_service, +) + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert EntitySignalsMappingServiceClient._get_default_mtls_endpoint(None) is None + assert ( + EntitySignalsMappingServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + EntitySignalsMappingServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + EntitySignalsMappingServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + EntitySignalsMappingServiceClient._get_default_mtls_endpoint( + sandbox_mtls_endpoint + ) + == sandbox_mtls_endpoint + ) + assert ( + EntitySignalsMappingServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert EntitySignalsMappingServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert EntitySignalsMappingServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert 
EntitySignalsMappingServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + EntitySignalsMappingServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert EntitySignalsMappingServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert EntitySignalsMappingServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert EntitySignalsMappingServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + EntitySignalsMappingServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert EntitySignalsMappingServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert ( + EntitySignalsMappingServiceClient._get_client_cert_source(None, False) is None + ) + assert ( + EntitySignalsMappingServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + EntitySignalsMappingServiceClient._get_client_cert_source( + mock_provided_cert_source, 
True + ) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + EntitySignalsMappingServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + EntitySignalsMappingServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + EntitySignalsMappingServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(EntitySignalsMappingServiceClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = EntitySignalsMappingServiceClient._DEFAULT_UNIVERSE + default_endpoint = ( + EntitySignalsMappingServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + ) + mock_universe = "bar.com" + mock_endpoint = EntitySignalsMappingServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + EntitySignalsMappingServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + EntitySignalsMappingServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == EntitySignalsMappingServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + EntitySignalsMappingServiceClient._get_api_endpoint( + None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + EntitySignalsMappingServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == EntitySignalsMappingServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + EntitySignalsMappingServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == 
EntitySignalsMappingServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + EntitySignalsMappingServiceClient._get_api_endpoint( + None, None, mock_universe, "never" + ) + == mock_endpoint + ) + assert ( + EntitySignalsMappingServiceClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + EntitySignalsMappingServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + EntitySignalsMappingServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + EntitySignalsMappingServiceClient._get_universe_domain( + None, universe_domain_env + ) + == universe_domain_env + ) + assert ( + EntitySignalsMappingServiceClient._get_universe_domain(None, None) + == EntitySignalsMappingServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + EntitySignalsMappingServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + EntitySignalsMappingServiceClient, + transports.EntitySignalsMappingServiceRestTransport, + "rest", + ), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. 
+ assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. 
+ # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (EntitySignalsMappingServiceClient, "rest"), + ], +) +def test_entity_signals_mapping_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "admanager.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://admanager.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.EntitySignalsMappingServiceRestTransport, "rest"), + ], +) +def 
test_entity_signals_mapping_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (EntitySignalsMappingServiceClient, "rest"), + ], +) +def test_entity_signals_mapping_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "admanager.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://admanager.googleapis.com" + ) + + +def test_entity_signals_mapping_service_client_get_transport_class(): + transport = EntitySignalsMappingServiceClient.get_transport_class() + available_transports = [ + transports.EntitySignalsMappingServiceRestTransport, + ] + assert transport in available_transports + + transport = 
EntitySignalsMappingServiceClient.get_transport_class("rest") + assert transport == transports.EntitySignalsMappingServiceRestTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + EntitySignalsMappingServiceClient, + transports.EntitySignalsMappingServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + EntitySignalsMappingServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(EntitySignalsMappingServiceClient), +) +def test_entity_signals_mapping_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object( + EntitySignalsMappingServiceClient, "get_transport_class" + ) as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object( + EntitySignalsMappingServiceClient, "get_transport_class" + ) as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + EntitySignalsMappingServiceClient, + transports.EntitySignalsMappingServiceRestTransport, + "rest", + "true", + ), + ( + EntitySignalsMappingServiceClient, + transports.EntitySignalsMappingServiceRestTransport, + 
"rest", + "false", + ), + ], +) +@mock.patch.object( + EntitySignalsMappingServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(EntitySignalsMappingServiceClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_entity_signals_mapping_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [EntitySignalsMappingServiceClient]) +@mock.patch.object( + EntitySignalsMappingServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(EntitySignalsMappingServiceClient), +) +def test_entity_signals_mapping_service_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize("client_class", [EntitySignalsMappingServiceClient]) +@mock.patch.object( + EntitySignalsMappingServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(EntitySignalsMappingServiceClient), +) +def test_entity_signals_mapping_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = EntitySignalsMappingServiceClient._DEFAULT_UNIVERSE + default_endpoint = ( + EntitySignalsMappingServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + ) + mock_universe = "bar.com" + mock_endpoint = EntitySignalsMappingServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + EntitySignalsMappingServiceClient, + transports.EntitySignalsMappingServiceRestTransport, + "rest", + ), + ], +) +def test_entity_signals_mapping_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + EntitySignalsMappingServiceClient, + transports.EntitySignalsMappingServiceRestTransport, + "rest", + None, + ), + ], +) +def test_entity_signals_mapping_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "request_type", + [ + entity_signals_mapping_service.GetEntitySignalsMappingRequest, + dict, + ], +) +def test_get_entity_signals_mapping_rest(request_type): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "networks/sample1/entitySignalsMappings/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = entity_signals_mapping_messages.EntitySignalsMapping( + name="name_value", + entity_signals_mapping_id=2660, + taxonomy_category_ids=[2267], + audience_segment_id=1980, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = entity_signals_mapping_messages.EntitySignalsMapping.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_entity_signals_mapping(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, entity_signals_mapping_messages.EntitySignalsMapping) + assert response.name == "name_value" + assert response.entity_signals_mapping_id == 2660 + assert response.taxonomy_category_ids == [2267] + + +def test_get_entity_signals_mapping_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_entity_signals_mapping + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_entity_signals_mapping + ] = mock_rpc + + request = {} + client.get_entity_signals_mapping(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_entity_signals_mapping(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_entity_signals_mapping_rest_required_fields( + request_type=entity_signals_mapping_service.GetEntitySignalsMappingRequest, +): + transport_class = transports.EntitySignalsMappingServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_entity_signals_mapping._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_entity_signals_mapping._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = entity_signals_mapping_messages.EntitySignalsMapping() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = entity_signals_mapping_messages.EntitySignalsMapping.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_entity_signals_mapping(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_entity_signals_mapping_rest_unset_required_fields(): + transport = transports.EntitySignalsMappingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_entity_signals_mapping._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_entity_signals_mapping_rest_interceptors(null_interceptor): + transport = transports.EntitySignalsMappingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.EntitySignalsMappingServiceRestInterceptor(), + ) + client = EntitySignalsMappingServiceClient(transport=transport) + with mock.patch.object( + 
type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.EntitySignalsMappingServiceRestInterceptor, + "post_get_entity_signals_mapping", + ) as post, mock.patch.object( + transports.EntitySignalsMappingServiceRestInterceptor, + "pre_get_entity_signals_mapping", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = entity_signals_mapping_service.GetEntitySignalsMappingRequest.pb( + entity_signals_mapping_service.GetEntitySignalsMappingRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + entity_signals_mapping_messages.EntitySignalsMapping.to_json( + entity_signals_mapping_messages.EntitySignalsMapping() + ) + ) + + request = entity_signals_mapping_service.GetEntitySignalsMappingRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = entity_signals_mapping_messages.EntitySignalsMapping() + + client.get_entity_signals_mapping( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_entity_signals_mapping_rest_bad_request( + transport: str = "rest", + request_type=entity_signals_mapping_service.GetEntitySignalsMappingRequest, +): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "networks/sample1/entitySignalsMappings/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_entity_signals_mapping(request) + + +def test_get_entity_signals_mapping_rest_flattened(): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = entity_signals_mapping_messages.EntitySignalsMapping() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "networks/sample1/entitySignalsMappings/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = entity_signals_mapping_messages.EntitySignalsMapping.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_entity_signals_mapping(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=networks/*/entitySignalsMappings/*}" % client.transport._host, + args[1], + ) + + +def test_get_entity_signals_mapping_rest_flattened_error(transport: str = "rest"): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_entity_signals_mapping( + entity_signals_mapping_service.GetEntitySignalsMappingRequest(), + name="name_value", + ) + + +def test_get_entity_signals_mapping_rest_error(): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + entity_signals_mapping_service.ListEntitySignalsMappingsRequest, + dict, + ], +) +def test_list_entity_signals_mappings_rest(request_type): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "networks/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = entity_signals_mapping_service.ListEntitySignalsMappingsResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = ( + entity_signals_mapping_service.ListEntitySignalsMappingsResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_entity_signals_mappings(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListEntitySignalsMappingsPager) + assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 + + +def test_list_entity_signals_mappings_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_entity_signals_mappings + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_entity_signals_mappings + ] = mock_rpc + + request = {} + client.list_entity_signals_mappings(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_entity_signals_mappings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_entity_signals_mappings_rest_required_fields( + request_type=entity_signals_mapping_service.ListEntitySignalsMappingsRequest, +): + transport_class = transports.EntitySignalsMappingServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_entity_signals_mappings._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_entity_signals_mappings._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + "skip", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = entity_signals_mapping_service.ListEntitySignalsMappingsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = ( + entity_signals_mapping_service.ListEntitySignalsMappingsResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_entity_signals_mappings(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_entity_signals_mappings_rest_unset_required_fields(): + transport = transports.EntitySignalsMappingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_entity_signals_mappings._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + "skip", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_entity_signals_mappings_rest_interceptors(null_interceptor): + transport = transports.EntitySignalsMappingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.EntitySignalsMappingServiceRestInterceptor(), + ) + client 
= EntitySignalsMappingServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.EntitySignalsMappingServiceRestInterceptor, + "post_list_entity_signals_mappings", + ) as post, mock.patch.object( + transports.EntitySignalsMappingServiceRestInterceptor, + "pre_list_entity_signals_mappings", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = entity_signals_mapping_service.ListEntitySignalsMappingsRequest.pb( + entity_signals_mapping_service.ListEntitySignalsMappingsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + entity_signals_mapping_service.ListEntitySignalsMappingsResponse.to_json( + entity_signals_mapping_service.ListEntitySignalsMappingsResponse() + ) + ) + + request = entity_signals_mapping_service.ListEntitySignalsMappingsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ( + entity_signals_mapping_service.ListEntitySignalsMappingsResponse() + ) + + client.list_entity_signals_mappings( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_entity_signals_mappings_rest_bad_request( + transport: str = "rest", + request_type=entity_signals_mapping_service.ListEntitySignalsMappingsRequest, +): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "networks/sample1"} + request = request_type(**request_init) + + # Mock 
the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_entity_signals_mappings(request) + + +def test_list_entity_signals_mappings_rest_flattened(): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = ( + entity_signals_mapping_service.ListEntitySignalsMappingsResponse() + ) + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "networks/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = ( + entity_signals_mapping_service.ListEntitySignalsMappingsResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_entity_signals_mappings(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=networks/*}/entitySignalsMappings" % client.transport._host, + args[1], + ) + + +def test_list_entity_signals_mappings_rest_flattened_error(transport: str = "rest"): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_entity_signals_mappings( + entity_signals_mapping_service.ListEntitySignalsMappingsRequest(), + parent="parent_value", + ) + + +def test_list_entity_signals_mappings_rest_pager(transport: str = "rest"): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + entity_signals_mapping_service.ListEntitySignalsMappingsResponse( + entity_signals_mappings=[ + entity_signals_mapping_messages.EntitySignalsMapping(), + entity_signals_mapping_messages.EntitySignalsMapping(), + entity_signals_mapping_messages.EntitySignalsMapping(), + ], + next_page_token="abc", + ), + entity_signals_mapping_service.ListEntitySignalsMappingsResponse( + entity_signals_mappings=[], + next_page_token="def", + ), + entity_signals_mapping_service.ListEntitySignalsMappingsResponse( + entity_signals_mappings=[ + entity_signals_mapping_messages.EntitySignalsMapping(), + ], + next_page_token="ghi", + ), + entity_signals_mapping_service.ListEntitySignalsMappingsResponse( + entity_signals_mappings=[ + entity_signals_mapping_messages.EntitySignalsMapping(), + entity_signals_mapping_messages.EntitySignalsMapping(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + entity_signals_mapping_service.ListEntitySignalsMappingsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "networks/sample1"} + + pager = client.list_entity_signals_mappings(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, entity_signals_mapping_messages.EntitySignalsMapping) + for i in results + ) + + pages = list(client.list_entity_signals_mappings(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + 
entity_signals_mapping_service.CreateEntitySignalsMappingRequest, + dict, + ], +) +def test_create_entity_signals_mapping_rest(request_type): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "networks/sample1"} + request_init["entity_signals_mapping"] = { + "audience_segment_id": 1980, + "content_bundle_id": 1792, + "custom_targeting_value_id": 2663, + "name": "name_value", + "entity_signals_mapping_id": 2660, + "taxonomy_category_ids": [2268, 2269], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = ( + entity_signals_mapping_service.CreateEntitySignalsMappingRequest.meta.fields[ + "entity_signals_mapping" + ] + ) + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "entity_signals_mapping" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["entity_signals_mapping"][field])): + del 
request_init["entity_signals_mapping"][field][i][subfield] + else: + del request_init["entity_signals_mapping"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = entity_signals_mapping_messages.EntitySignalsMapping( + name="name_value", + entity_signals_mapping_id=2660, + taxonomy_category_ids=[2267], + audience_segment_id=1980, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = entity_signals_mapping_messages.EntitySignalsMapping.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_entity_signals_mapping(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, entity_signals_mapping_messages.EntitySignalsMapping) + assert response.name == "name_value" + assert response.entity_signals_mapping_id == 2660 + assert response.taxonomy_category_ids == [2267] + + +def test_create_entity_signals_mapping_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_entity_signals_mapping + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_entity_signals_mapping + ] = mock_rpc + + request = {} + client.create_entity_signals_mapping(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_entity_signals_mapping(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_entity_signals_mapping_rest_required_fields( + request_type=entity_signals_mapping_service.CreateEntitySignalsMappingRequest, +): + transport_class = transports.EntitySignalsMappingServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_entity_signals_mapping._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_entity_signals_mapping._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = entity_signals_mapping_messages.EntitySignalsMapping() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = entity_signals_mapping_messages.EntitySignalsMapping.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_entity_signals_mapping(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_entity_signals_mapping_rest_unset_required_fields(): + transport = transports.EntitySignalsMappingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_entity_signals_mapping._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "entitySignalsMapping", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_entity_signals_mapping_rest_interceptors(null_interceptor): + transport = transports.EntitySignalsMappingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.EntitySignalsMappingServiceRestInterceptor(), + ) + client = 
EntitySignalsMappingServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.EntitySignalsMappingServiceRestInterceptor, + "post_create_entity_signals_mapping", + ) as post, mock.patch.object( + transports.EntitySignalsMappingServiceRestInterceptor, + "pre_create_entity_signals_mapping", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = ( + entity_signals_mapping_service.CreateEntitySignalsMappingRequest.pb( + entity_signals_mapping_service.CreateEntitySignalsMappingRequest() + ) + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + entity_signals_mapping_messages.EntitySignalsMapping.to_json( + entity_signals_mapping_messages.EntitySignalsMapping() + ) + ) + + request = entity_signals_mapping_service.CreateEntitySignalsMappingRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = entity_signals_mapping_messages.EntitySignalsMapping() + + client.create_entity_signals_mapping( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_entity_signals_mapping_rest_bad_request( + transport: str = "rest", + request_type=entity_signals_mapping_service.CreateEntitySignalsMappingRequest, +): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "networks/sample1"} + request = request_type(**request_init) + + # Mock the http request call within 
the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_entity_signals_mapping(request) + + +def test_create_entity_signals_mapping_rest_flattened(): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = entity_signals_mapping_messages.EntitySignalsMapping() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "networks/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + entity_signals_mapping=entity_signals_mapping_messages.EntitySignalsMapping( + audience_segment_id=1980 + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = entity_signals_mapping_messages.EntitySignalsMapping.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_entity_signals_mapping(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=networks/*}/entitySignalsMappings" % client.transport._host, + args[1], + ) + + +def test_create_entity_signals_mapping_rest_flattened_error(transport: str = "rest"): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_entity_signals_mapping( + entity_signals_mapping_service.CreateEntitySignalsMappingRequest(), + parent="parent_value", + entity_signals_mapping=entity_signals_mapping_messages.EntitySignalsMapping( + audience_segment_id=1980 + ), + ) + + +def test_create_entity_signals_mapping_rest_error(): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + entity_signals_mapping_service.UpdateEntitySignalsMappingRequest, + dict, + ], +) +def test_update_entity_signals_mapping_rest(request_type): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "entity_signals_mapping": { + "name": "networks/sample1/entitySignalsMappings/sample2" + } + } + request_init["entity_signals_mapping"] = { + "audience_segment_id": 1980, + "content_bundle_id": 1792, + "custom_targeting_value_id": 2663, + "name": "networks/sample1/entitySignalsMappings/sample2", + "entity_signals_mapping_id": 2660, + "taxonomy_category_ids": [2268, 2269], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = ( + entity_signals_mapping_service.UpdateEntitySignalsMappingRequest.meta.fields[ + "entity_signals_mapping" + ] + ) + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "entity_signals_mapping" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the 
sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["entity_signals_mapping"][field])): + del request_init["entity_signals_mapping"][field][i][subfield] + else: + del request_init["entity_signals_mapping"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = entity_signals_mapping_messages.EntitySignalsMapping( + name="name_value", + entity_signals_mapping_id=2660, + taxonomy_category_ids=[2267], + audience_segment_id=1980, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = entity_signals_mapping_messages.EntitySignalsMapping.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_entity_signals_mapping(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, entity_signals_mapping_messages.EntitySignalsMapping) + assert response.name == "name_value" + assert response.entity_signals_mapping_id == 2660 + assert response.taxonomy_category_ids == [2267] + + +def test_update_entity_signals_mapping_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_entity_signals_mapping + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_entity_signals_mapping + ] = mock_rpc + + request = {} + client.update_entity_signals_mapping(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_entity_signals_mapping(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_entity_signals_mapping_rest_required_fields( + request_type=entity_signals_mapping_service.UpdateEntitySignalsMappingRequest, +): + transport_class = transports.EntitySignalsMappingServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_entity_signals_mapping._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_entity_signals_mapping._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = entity_signals_mapping_messages.EntitySignalsMapping() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = entity_signals_mapping_messages.EntitySignalsMapping.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_entity_signals_mapping(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_entity_signals_mapping_rest_unset_required_fields(): + transport = transports.EntitySignalsMappingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_entity_signals_mapping._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "entitySignalsMapping", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_entity_signals_mapping_rest_interceptors(null_interceptor): + transport = transports.EntitySignalsMappingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.EntitySignalsMappingServiceRestInterceptor(), + ) + client = EntitySignalsMappingServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, 
mock.patch.object( + transports.EntitySignalsMappingServiceRestInterceptor, + "post_update_entity_signals_mapping", + ) as post, mock.patch.object( + transports.EntitySignalsMappingServiceRestInterceptor, + "pre_update_entity_signals_mapping", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = ( + entity_signals_mapping_service.UpdateEntitySignalsMappingRequest.pb( + entity_signals_mapping_service.UpdateEntitySignalsMappingRequest() + ) + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + entity_signals_mapping_messages.EntitySignalsMapping.to_json( + entity_signals_mapping_messages.EntitySignalsMapping() + ) + ) + + request = entity_signals_mapping_service.UpdateEntitySignalsMappingRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = entity_signals_mapping_messages.EntitySignalsMapping() + + client.update_entity_signals_mapping( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_entity_signals_mapping_rest_bad_request( + transport: str = "rest", + request_type=entity_signals_mapping_service.UpdateEntitySignalsMappingRequest, +): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "entity_signals_mapping": { + "name": "networks/sample1/entitySignalsMappings/sample2" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_entity_signals_mapping(request) + + +def test_update_entity_signals_mapping_rest_flattened(): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = entity_signals_mapping_messages.EntitySignalsMapping() + + # get arguments that satisfy an http rule for this method + sample_request = { + "entity_signals_mapping": { + "name": "networks/sample1/entitySignalsMappings/sample2" + } + } + + # get truthy value for each flattened field + mock_args = dict( + entity_signals_mapping=entity_signals_mapping_messages.EntitySignalsMapping( + audience_segment_id=1980 + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = entity_signals_mapping_messages.EntitySignalsMapping.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_entity_signals_mapping(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{entity_signals_mapping.name=networks/*/entitySignalsMappings/*}" + % client.transport._host, + args[1], + ) + + +def test_update_entity_signals_mapping_rest_flattened_error(transport: str = "rest"): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_entity_signals_mapping( + entity_signals_mapping_service.UpdateEntitySignalsMappingRequest(), + entity_signals_mapping=entity_signals_mapping_messages.EntitySignalsMapping( + audience_segment_id=1980 + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_entity_signals_mapping_rest_error(): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest, + dict, + ], +) +def test_batch_create_entity_signals_mappings_rest(request_type): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "networks/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = ( + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse() + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = ( + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.batch_create_entity_signals_mappings(request) + + # Establish that the response is the type that we expect. + assert isinstance( + response, + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse, + ) + + +def test_batch_create_entity_signals_mappings_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.batch_create_entity_signals_mappings + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.batch_create_entity_signals_mappings + ] = mock_rpc + + request = {} + client.batch_create_entity_signals_mappings(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.batch_create_entity_signals_mappings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_batch_create_entity_signals_mappings_rest_required_fields( + request_type=entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest, +): + transport_class = transports.EntitySignalsMappingServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_create_entity_signals_mappings._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_create_entity_signals_mappings._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = ( + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse() + ) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.batch_create_entity_signals_mappings(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_batch_create_entity_signals_mappings_rest_unset_required_fields(): + transport = transports.EntitySignalsMappingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.batch_create_entity_signals_mappings._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "requests", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_create_entity_signals_mappings_rest_interceptors(null_interceptor): + transport = transports.EntitySignalsMappingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else 
transports.EntitySignalsMappingServiceRestInterceptor(), + ) + client = EntitySignalsMappingServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.EntitySignalsMappingServiceRestInterceptor, + "post_batch_create_entity_signals_mappings", + ) as post, mock.patch.object( + transports.EntitySignalsMappingServiceRestInterceptor, + "pre_batch_create_entity_signals_mappings", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = ( + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest.pb( + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest() + ) + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse.to_json( + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse() + ) + + request = ( + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest() + ) + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ( + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse() + ) + + client.batch_create_entity_signals_mappings( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_batch_create_entity_signals_mappings_rest_bad_request( + transport: str = "rest", + request_type=entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest, +): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "networks/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.batch_create_entity_signals_mappings(request) + + +def test_batch_create_entity_signals_mappings_rest_flattened(): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = ( + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse() + ) + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "networks/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + requests=[ + entity_signals_mapping_service.CreateEntitySignalsMappingRequest( + parent="parent_value" + ) + ], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = ( + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.batch_create_entity_signals_mappings(**mock_args) + + # Establish that the underlying call was made with 
the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=networks/*}/entitySignalsMappings:batchCreate" + % client.transport._host, + args[1], + ) + + +def test_batch_create_entity_signals_mappings_rest_flattened_error( + transport: str = "rest", +): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.batch_create_entity_signals_mappings( + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest(), + parent="parent_value", + requests=[ + entity_signals_mapping_service.CreateEntitySignalsMappingRequest( + parent="parent_value" + ) + ], + ) + + +def test_batch_create_entity_signals_mappings_rest_error(): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest, + dict, + ], +) +def test_batch_update_entity_signals_mappings_rest(request_type): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "networks/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = ( + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse() + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = ( + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.batch_update_entity_signals_mappings(request) + + # Establish that the response is the type that we expect. + assert isinstance( + response, + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse, + ) + + +def test_batch_update_entity_signals_mappings_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.batch_update_entity_signals_mappings + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.batch_update_entity_signals_mappings + ] = mock_rpc + + request = {} + client.batch_update_entity_signals_mappings(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.batch_update_entity_signals_mappings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_batch_update_entity_signals_mappings_rest_required_fields( + request_type=entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest, +): + transport_class = transports.EntitySignalsMappingServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_update_entity_signals_mappings._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_update_entity_signals_mappings._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = ( + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse() + ) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.batch_update_entity_signals_mappings(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_batch_update_entity_signals_mappings_rest_unset_required_fields(): + transport = transports.EntitySignalsMappingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.batch_update_entity_signals_mappings._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "requests", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_update_entity_signals_mappings_rest_interceptors(null_interceptor): + transport = transports.EntitySignalsMappingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else 
transports.EntitySignalsMappingServiceRestInterceptor(), + ) + client = EntitySignalsMappingServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.EntitySignalsMappingServiceRestInterceptor, + "post_batch_update_entity_signals_mappings", + ) as post, mock.patch.object( + transports.EntitySignalsMappingServiceRestInterceptor, + "pre_batch_update_entity_signals_mappings", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = ( + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest.pb( + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest() + ) + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse.to_json( + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse() + ) + + request = ( + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest() + ) + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ( + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse() + ) + + client.batch_update_entity_signals_mappings( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_batch_update_entity_signals_mappings_rest_bad_request( + transport: str = "rest", + request_type=entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest, +): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "networks/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.batch_update_entity_signals_mappings(request) + + +def test_batch_update_entity_signals_mappings_rest_flattened(): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = ( + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse() + ) + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "networks/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + requests=[ + entity_signals_mapping_service.UpdateEntitySignalsMappingRequest( + entity_signals_mapping=entity_signals_mapping_messages.EntitySignalsMapping( + audience_segment_id=1980 + ) + ) + ], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = ( + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + 
client.batch_update_entity_signals_mappings(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=networks/*}/entitySignalsMappings:batchUpdate" + % client.transport._host, + args[1], + ) + + +def test_batch_update_entity_signals_mappings_rest_flattened_error( + transport: str = "rest", +): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.batch_update_entity_signals_mappings( + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest(), + parent="parent_value", + requests=[ + entity_signals_mapping_service.UpdateEntitySignalsMappingRequest( + entity_signals_mapping=entity_signals_mapping_messages.EntitySignalsMapping( + audience_segment_id=1980 + ) + ) + ], + ) + + +def test_batch_update_entity_signals_mappings_rest_error(): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.EntitySignalsMappingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.EntitySignalsMappingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = EntitySignalsMappingServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.EntitySignalsMappingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = EntitySignalsMappingServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = EntitySignalsMappingServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.EntitySignalsMappingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = EntitySignalsMappingServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.EntitySignalsMappingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = EntitySignalsMappingServiceClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.EntitySignalsMappingServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_transport_kind(transport_name): + transport = EntitySignalsMappingServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_entity_signals_mapping_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.EntitySignalsMappingServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_entity_signals_mapping_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.ads.admanager_v1.services.entity_signals_mapping_service.transports.EntitySignalsMappingServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.EntitySignalsMappingServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "get_entity_signals_mapping", + "list_entity_signals_mappings", + "create_entity_signals_mapping", + "update_entity_signals_mapping", + "batch_create_entity_signals_mappings", + "batch_update_entity_signals_mappings", + "get_operation", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_entity_signals_mapping_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.ads.admanager_v1.services.entity_signals_mapping_service.transports.EntitySignalsMappingServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.EntitySignalsMappingServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=(), + quota_project_id="octopus", + ) + + +def test_entity_signals_mapping_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.ads.admanager_v1.services.entity_signals_mapping_service.transports.EntitySignalsMappingServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.EntitySignalsMappingServiceTransport() + adc.assert_called_once() + + +def test_entity_signals_mapping_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + EntitySignalsMappingServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=(), + quota_project_id=None, + ) + + +def test_entity_signals_mapping_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.EntitySignalsMappingServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_entity_signals_mapping_service_host_no_port(transport_name): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="admanager.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "admanager.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://admanager.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_entity_signals_mapping_service_host_with_port(transport_name): + 
client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="admanager.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "admanager.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://admanager.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_entity_signals_mapping_service_client_transport_session_collision( + transport_name, +): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = EntitySignalsMappingServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = EntitySignalsMappingServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_entity_signals_mapping._session + session2 = client2.transport.get_entity_signals_mapping._session + assert session1 != session2 + session1 = client1.transport.list_entity_signals_mappings._session + session2 = client2.transport.list_entity_signals_mappings._session + assert session1 != session2 + session1 = client1.transport.create_entity_signals_mapping._session + session2 = client2.transport.create_entity_signals_mapping._session + assert session1 != session2 + session1 = client1.transport.update_entity_signals_mapping._session + session2 = client2.transport.update_entity_signals_mapping._session + assert session1 != session2 + session1 = client1.transport.batch_create_entity_signals_mappings._session + session2 = client2.transport.batch_create_entity_signals_mappings._session + assert session1 != session2 + session1 = client1.transport.batch_update_entity_signals_mappings._session + session2 = client2.transport.batch_update_entity_signals_mappings._session + assert session1 != session2 + + +def test_entity_signals_mapping_path(): + network_code = "squid" + 
entity_signals_mapping = "clam" + expected = ( + "networks/{network_code}/entitySignalsMappings/{entity_signals_mapping}".format( + network_code=network_code, + entity_signals_mapping=entity_signals_mapping, + ) + ) + actual = EntitySignalsMappingServiceClient.entity_signals_mapping_path( + network_code, entity_signals_mapping + ) + assert expected == actual + + +def test_parse_entity_signals_mapping_path(): + expected = { + "network_code": "whelk", + "entity_signals_mapping": "octopus", + } + path = EntitySignalsMappingServiceClient.entity_signals_mapping_path(**expected) + + # Check that the path construction is reversible. + actual = EntitySignalsMappingServiceClient.parse_entity_signals_mapping_path(path) + assert expected == actual + + +def test_network_path(): + network_code = "oyster" + expected = "networks/{network_code}".format( + network_code=network_code, + ) + actual = EntitySignalsMappingServiceClient.network_path(network_code) + assert expected == actual + + +def test_parse_network_path(): + expected = { + "network_code": "nudibranch", + } + path = EntitySignalsMappingServiceClient.network_path(**expected) + + # Check that the path construction is reversible. + actual = EntitySignalsMappingServiceClient.parse_network_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = EntitySignalsMappingServiceClient.common_billing_account_path( + billing_account + ) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = EntitySignalsMappingServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = EntitySignalsMappingServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = EntitySignalsMappingServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = EntitySignalsMappingServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = EntitySignalsMappingServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = EntitySignalsMappingServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = EntitySignalsMappingServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = EntitySignalsMappingServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, + ) + actual = EntitySignalsMappingServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = EntitySignalsMappingServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = EntitySignalsMappingServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = EntitySignalsMappingServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = EntitySignalsMappingServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = EntitySignalsMappingServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.EntitySignalsMappingServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.EntitySignalsMappingServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = EntitySignalsMappingServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "networks/sample1/operations/reports/runs/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "networks/sample1/operations/reports/runs/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.Operation) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = EntitySignalsMappingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + EntitySignalsMappingServiceClient, + transports.EntitySignalsMappingServiceRestTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_label_service.py 
b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_label_service.py deleted file mode 100644 index acbbbef71582..000000000000 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_label_service.py +++ /dev/null @@ -1,2151 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os - -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -from collections.abc import Iterable -import json -import math - -from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template -from google.api_core import api_core_version, client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -import google.auth -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import json_format -import grpc -from grpc.experimental import aio -from proto.marshal.rules import wrappers -from proto.marshal.rules.dates import DurationRule, TimestampRule -import pytest -from requests import PreparedRequest, Request, Response -from requests.sessions import Session - -from 
google.ads.admanager_v1.services.label_service import ( - LabelServiceClient, - pagers, - transports, -) -from google.ads.admanager_v1.types import label_service - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return ( - "foo.googleapis.com" - if ("localhost" in client.DEFAULT_ENDPOINT) - else client.DEFAULT_ENDPOINT - ) - - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint_template(client): - return ( - "test.{UNIVERSE_DOMAIN}" - if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) - else client._DEFAULT_ENDPOINT_TEMPLATE - ) - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert LabelServiceClient._get_default_mtls_endpoint(None) is None - assert ( - LabelServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - ) - assert ( - LabelServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) - == api_mtls_endpoint - ) - assert ( - LabelServiceClient._get_default_mtls_endpoint(sandbox_endpoint) - == sandbox_mtls_endpoint - ) - assert ( - LabelServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) - == sandbox_mtls_endpoint - ) - assert LabelServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -def test__read_environment_variables(): - assert 
LabelServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert LabelServiceClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert LabelServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} - ): - with pytest.raises(ValueError) as excinfo: - LabelServiceClient._read_environment_variables() - assert ( - str(excinfo.value) - == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert LabelServiceClient._read_environment_variables() == ( - False, - "never", - None, - ) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert LabelServiceClient._read_environment_variables() == ( - False, - "always", - None, - ) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert LabelServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - LabelServiceClient._read_environment_variables() - assert ( - str(excinfo.value) - == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - ) - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert LabelServiceClient._read_environment_variables() == ( - False, - "auto", - "foo.com", - ) - - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert LabelServiceClient._get_client_cert_source(None, False) is None - assert ( - 
LabelServiceClient._get_client_cert_source(mock_provided_cert_source, False) - is None - ) - assert ( - LabelServiceClient._get_client_cert_source(mock_provided_cert_source, True) - == mock_provided_cert_source - ) - - with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", return_value=True - ): - with mock.patch( - "google.auth.transport.mtls.default_client_cert_source", - return_value=mock_default_cert_source, - ): - assert ( - LabelServiceClient._get_client_cert_source(None, True) - is mock_default_cert_source - ) - assert ( - LabelServiceClient._get_client_cert_source( - mock_provided_cert_source, "true" - ) - is mock_provided_cert_source - ) - - -@mock.patch.object( - LabelServiceClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(LabelServiceClient), -) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = LabelServiceClient._DEFAULT_UNIVERSE - default_endpoint = LabelServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=default_universe - ) - mock_universe = "bar.com" - mock_endpoint = LabelServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=mock_universe - ) - - assert ( - LabelServiceClient._get_api_endpoint( - api_override, mock_client_cert_source, default_universe, "always" - ) - == api_override - ) - assert ( - LabelServiceClient._get_api_endpoint( - None, mock_client_cert_source, default_universe, "auto" - ) - == LabelServiceClient.DEFAULT_MTLS_ENDPOINT - ) - assert ( - LabelServiceClient._get_api_endpoint(None, None, default_universe, "auto") - == default_endpoint - ) - assert ( - LabelServiceClient._get_api_endpoint(None, None, default_universe, "always") - == LabelServiceClient.DEFAULT_MTLS_ENDPOINT - ) - assert ( - LabelServiceClient._get_api_endpoint( - None, mock_client_cert_source, default_universe, "always" - ) - == LabelServiceClient.DEFAULT_MTLS_ENDPOINT - ) - assert ( - 
LabelServiceClient._get_api_endpoint(None, None, mock_universe, "never") - == mock_endpoint - ) - assert ( - LabelServiceClient._get_api_endpoint(None, None, default_universe, "never") - == default_endpoint - ) - - with pytest.raises(MutualTLSChannelError) as excinfo: - LabelServiceClient._get_api_endpoint( - None, mock_client_cert_source, mock_universe, "auto" - ) - assert ( - str(excinfo.value) - == "mTLS is not supported in any universe other than googleapis.com." - ) - - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert ( - LabelServiceClient._get_universe_domain( - client_universe_domain, universe_domain_env - ) - == client_universe_domain - ) - assert ( - LabelServiceClient._get_universe_domain(None, universe_domain_env) - == universe_domain_env - ) - assert ( - LabelServiceClient._get_universe_domain(None, None) - == LabelServiceClient._DEFAULT_UNIVERSE - ) - - with pytest.raises(ValueError) as excinfo: - LabelServiceClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (LabelServiceClient, transports.LabelServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - -@pytest.mark.parametrize( - "client_class,transport_name", - [ - (LabelServiceClient, "rest"), - ], -) -def test_label_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object( - service_account.Credentials, "from_service_account_info" - ) as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - "admanager.googleapis.com:443" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://admanager.googleapis.com" - ) - - -@pytest.mark.parametrize( - "transport_class,transport_name", - [ - (transports.LabelServiceRestTransport, "rest"), - ], -) -def test_label_service_client_service_account_always_use_jwt( - transport_class, transport_name -): - with mock.patch.object( - service_account.Credentials, "with_always_use_jwt_access", create=True - ) as use_jwt: - creds = 
service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object( - service_account.Credentials, "with_always_use_jwt_access", create=True - ) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize( - "client_class,transport_name", - [ - (LabelServiceClient, "rest"), - ], -) -def test_label_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object( - service_account.Credentials, "from_service_account_file" - ) as factory: - factory.return_value = creds - client = client_class.from_service_account_file( - "dummy/file/path.json", transport=transport_name - ) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json( - "dummy/file/path.json", transport=transport_name - ) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - "admanager.googleapis.com:443" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://admanager.googleapis.com" - ) - - -def test_label_service_client_get_transport_class(): - transport = LabelServiceClient.get_transport_class() - available_transports = [ - transports.LabelServiceRestTransport, - ] - assert transport in available_transports - - transport = LabelServiceClient.get_transport_class("rest") - assert transport == transports.LabelServiceRestTransport - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (LabelServiceClient, transports.LabelServiceRestTransport, "rest"), - ], -) -@mock.patch.object( - LabelServiceClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - 
modify_default_endpoint_template(LabelServiceClient), -) -def test_label_service_client_client_options( - client_class, transport_class, transport_name -): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(LabelServiceClient, "get_transport_class") as gtc: - transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(LabelServiceClient, "get_transport_class") as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert ( - str(excinfo.value) - == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - ) - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} - ): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert ( - str(excinfo.value) - == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions( - api_audience="https://language.googleapis.com" - ) - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com", - ) - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name,use_client_cert_env", - [ - (LabelServiceClient, transports.LabelServiceRestTransport, "rest", "true"), - (LabelServiceClient, transports.LabelServiceRestTransport, "rest", "false"), - ], -) -@mock.patch.object( - LabelServiceClient, - 
"_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(LabelServiceClient), -) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_label_service_client_mtls_env_auto( - client_class, transport_class, transport_name, use_client_cert_env -): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - options = client_options.ClientOptions( - client_cert_source=client_cert_source_callback - ) - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=True, - ): - with mock.patch( - "google.auth.transport.mtls.default_client_cert_source", - return_value=client_cert_source_callback, - ): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=False, - ): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [LabelServiceClient]) -@mock.patch.object( - LabelServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LabelServiceClient) -) -def test_label_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint - ) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( - options - ) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint - ) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( - options - ) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=False, - ): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=True, - ): - with mock.patch( - "google.auth.transport.mtls.default_client_cert_source", - return_value=mock_client_cert_source, - ): - ( - api_endpoint, - cert_source, - ) = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert ( - str(excinfo.value) - == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - ) - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} - ): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert ( - str(excinfo.value) - == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - - -@pytest.mark.parametrize("client_class", [LabelServiceClient]) -@mock.patch.object( - LabelServiceClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(LabelServiceClient), -) -def test_label_service_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = LabelServiceClient._DEFAULT_UNIVERSE - default_endpoint = LabelServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=default_universe - ) - mock_universe = "bar.com" - mock_endpoint = LabelServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=mock_universe - ) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch( - "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" - ): - options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, api_endpoint=api_override - ) - client = client_class( - client_options=options, - credentials=ga_credentials.AnonymousCredentials(), - ) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - else: - client = client_class( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - assert client.api_endpoint == ( - mock_endpoint if universe_exists else default_endpoint - ) - assert client.universe_domain == ( - mock_universe if universe_exists else default_universe - ) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
- options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (LabelServiceClient, transports.LabelServiceRestTransport, "rest"), - ], -) -def test_label_service_client_client_options_scopes( - client_class, transport_class, transport_name -): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name,grpc_helpers", - [ - (LabelServiceClient, transports.LabelServiceRestTransport, "rest", None), - ], -) -def test_label_service_client_client_options_credentials_file( - client_class, transport_class, transport_name, grpc_helpers -): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions(credentials_file="credentials.json") - - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize( - "request_type", - [ - label_service.GetLabelRequest, - dict, - ], -) -def test_get_label_rest(request_type): - client = LabelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "networks/sample1/labels/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = label_service.Label( - name="name_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = label_service.Label.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_label(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, label_service.Label) - assert response.name == "name_value" - - -def test_get_label_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = LabelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_label in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.get_label] = mock_rpc - - request = {} - client.get_label(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_label(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_label_rest_required_fields(request_type=label_service.GetLabelRequest): - transport_class = transports.LabelServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_label._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_label._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = LabelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = label_service.Label() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = label_service.Label.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_label(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_label_rest_unset_required_fields(): - transport = transports.LabelServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_label._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_label_rest_interceptors(null_interceptor): - transport = transports.LabelServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.LabelServiceRestInterceptor(), - ) - client = LabelServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.LabelServiceRestInterceptor, "post_get_label" - ) as post, mock.patch.object( - transports.LabelServiceRestInterceptor, "pre_get_label" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = 
label_service.GetLabelRequest.pb(label_service.GetLabelRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = label_service.Label.to_json(label_service.Label()) - - request = label_service.GetLabelRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = label_service.Label() - - client.get_label( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_label_rest_bad_request( - transport: str = "rest", request_type=label_service.GetLabelRequest -): - client = LabelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "networks/sample1/labels/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_label(request) - - -def test_get_label_rest_flattened(): - client = LabelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = label_service.Label() - - # get arguments that satisfy an http rule for this method - sample_request = {"name": "networks/sample1/labels/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = label_service.Label.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_label(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=networks/*/labels/*}" % client.transport._host, args[1] - ) - - -def test_get_label_rest_flattened_error(transport: str = "rest"): - client = LabelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_label( - label_service.GetLabelRequest(), - name="name_value", - ) - - -def test_get_label_rest_error(): - client = LabelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - label_service.ListLabelsRequest, - dict, - ], -) -def test_list_labels_rest(request_type): - client = LabelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "networks/sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = label_service.ListLabelsResponse( - next_page_token="next_page_token_value", - total_size=1086, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = label_service.ListLabelsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_labels(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListLabelsPager) - assert response.next_page_token == "next_page_token_value" - assert response.total_size == 1086 - - -def test_list_labels_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = LabelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_labels in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.list_labels] = mock_rpc - - request = {} - client.list_labels(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_labels(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_labels_rest_required_fields(request_type=label_service.ListLabelsRequest): - transport_class = transports.LabelServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_labels._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_labels._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - "skip", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = LabelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = label_service.ListLabelsResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = label_service.ListLabelsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_labels(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_labels_rest_unset_required_fields(): - transport = transports.LabelServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_labels._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - "skip", - ) - ) - & set(("parent",)) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_labels_rest_interceptors(null_interceptor): - transport = transports.LabelServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.LabelServiceRestInterceptor(), - ) - client = LabelServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - 
path_template, "transcode" - ) as transcode, mock.patch.object( - transports.LabelServiceRestInterceptor, "post_list_labels" - ) as post, mock.patch.object( - transports.LabelServiceRestInterceptor, "pre_list_labels" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = label_service.ListLabelsRequest.pb( - label_service.ListLabelsRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = label_service.ListLabelsResponse.to_json( - label_service.ListLabelsResponse() - ) - - request = label_service.ListLabelsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = label_service.ListLabelsResponse() - - client.list_labels( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_labels_rest_bad_request( - transport: str = "rest", request_type=label_service.ListLabelsRequest -): - client = LabelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "networks/sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_labels(request) - - -def test_list_labels_rest_flattened(): - client = LabelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = label_service.ListLabelsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "networks/sample1"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = label_service.ListLabelsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_labels(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=networks/*}/labels" % client.transport._host, args[1] - ) - - -def test_list_labels_rest_flattened_error(transport: str = "rest"): - client = LabelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_labels( - label_service.ListLabelsRequest(), - parent="parent_value", - ) - - -def test_list_labels_rest_pager(transport: str = "rest"): - client = LabelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - label_service.ListLabelsResponse( - labels=[ - label_service.Label(), - label_service.Label(), - label_service.Label(), - ], - next_page_token="abc", - ), - label_service.ListLabelsResponse( - labels=[], - next_page_token="def", - ), - label_service.ListLabelsResponse( - labels=[ - label_service.Label(), - ], - next_page_token="ghi", - ), - label_service.ListLabelsResponse( - labels=[ - label_service.Label(), - label_service.Label(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(label_service.ListLabelsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "networks/sample1"} - - pager = client.list_labels(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, label_service.Label) for i in results) - - pages = list(client.list_labels(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -def test_credentials_transport_error(): - # It is an error to provide credentials and 
a transport instance. - transport = transports.LabelServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = LabelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.LabelServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = LabelServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.LabelServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = LabelServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = LabelServiceClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.LabelServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = LabelServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.LabelServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = LabelServiceClient(transport=transport) - assert client.transport is transport - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.LabelServiceRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - - -@pytest.mark.parametrize( - "transport_name", - [ - "rest", - ], -) -def test_transport_kind(transport_name): - transport = LabelServiceClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - - -def test_label_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.LabelServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", - ) - - -def test_label_service_base_transport(): - # Instantiate the base transport. - with mock.patch( - "google.ads.admanager_v1.services.label_service.transports.LabelServiceTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.LabelServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - "get_label", - "list_labels", - "get_operation", - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - "kind", - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_label_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.ads.admanager_v1.services.label_service.transports.LabelServiceTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.LabelServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=None, - default_scopes=(), - quota_project_id="octopus", - ) - - -def test_label_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.ads.admanager_v1.services.label_service.transports.LabelServiceTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.LabelServiceTransport() - adc.assert_called_once() - - -def test_label_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - LabelServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=(), - quota_project_id=None, - ) - - -def test_label_service_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch( - "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" - ) as mock_configure_mtls_channel: - transports.LabelServiceRestTransport( - credentials=cred, client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize( - "transport_name", - [ - "rest", - ], -) -def test_label_service_host_no_port(transport_name): - client = LabelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="admanager.googleapis.com" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "admanager.googleapis.com:443" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://admanager.googleapis.com" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "rest", - ], -) -def test_label_service_host_with_port(transport_name): - client = LabelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="admanager.googleapis.com:8000" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "admanager.googleapis.com:8000" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://admanager.googleapis.com:8000" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "rest", - ], -) -def test_label_service_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = 
LabelServiceClient( - credentials=creds1, - transport=transport_name, - ) - client2 = LabelServiceClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.get_label._session - session2 = client2.transport.get_label._session - assert session1 != session2 - session1 = client1.transport.list_labels._session - session2 = client2.transport.list_labels._session - assert session1 != session2 - - -def test_label_path(): - network_code = "squid" - label = "clam" - expected = "networks/{network_code}/labels/{label}".format( - network_code=network_code, - label=label, - ) - actual = LabelServiceClient.label_path(network_code, label) - assert expected == actual - - -def test_parse_label_path(): - expected = { - "network_code": "whelk", - "label": "octopus", - } - path = LabelServiceClient.label_path(**expected) - - # Check that the path construction is reversible. - actual = LabelServiceClient.parse_label_path(path) - assert expected == actual - - -def test_network_path(): - network_code = "oyster" - expected = "networks/{network_code}".format( - network_code=network_code, - ) - actual = LabelServiceClient.network_path(network_code) - assert expected == actual - - -def test_parse_network_path(): - expected = { - "network_code": "nudibranch", - } - path = LabelServiceClient.network_path(**expected) - - # Check that the path construction is reversible. - actual = LabelServiceClient.parse_network_path(path) - assert expected == actual - - -def test_common_billing_account_path(): - billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format( - billing_account=billing_account, - ) - actual = LabelServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "mussel", - } - path = LabelServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. 
- actual = LabelServiceClient.parse_common_billing_account_path(path) - assert expected == actual - - -def test_common_folder_path(): - folder = "winkle" - expected = "folders/{folder}".format( - folder=folder, - ) - actual = LabelServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nautilus", - } - path = LabelServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = LabelServiceClient.parse_common_folder_path(path) - assert expected == actual - - -def test_common_organization_path(): - organization = "scallop" - expected = "organizations/{organization}".format( - organization=organization, - ) - actual = LabelServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "abalone", - } - path = LabelServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = LabelServiceClient.parse_common_organization_path(path) - assert expected == actual - - -def test_common_project_path(): - project = "squid" - expected = "projects/{project}".format( - project=project, - ) - actual = LabelServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "clam", - } - path = LabelServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. 
- actual = LabelServiceClient.parse_common_project_path(path) - assert expected == actual - - -def test_common_location_path(): - project = "whelk" - location = "octopus" - expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, - ) - actual = LabelServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - } - path = LabelServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = LabelServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object( - transports.LabelServiceTransport, "_prep_wrapped_messages" - ) as prep: - client = LabelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object( - transports.LabelServiceTransport, "_prep_wrapped_messages" - ) as prep: - transport_class = LabelServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_get_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.GetOperationRequest -): - client = LabelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "networks/sample1/operations/reports/exports/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.GetOperationRequest, - dict, - ], -) -def test_get_operation_rest(request_type): - client = LabelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "networks/sample1/operations/reports/exports/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.Operation) - - -def test_transport_close(): - transports = { - "rest": "_session", - } - - for transport, close_name in transports.items(): - client = LabelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport - ) - with mock.patch.object( - type(getattr(client.transport, close_name)), "close" - ) as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - "rest", - ] - for transport in transports: - client = LabelServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport - ) - # Test client calls underlying transport. - with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - - -@pytest.mark.parametrize( - "client_class,transport_class", - [ - (LabelServiceClient, transports.LabelServiceRestTransport), - ], -) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_network_service.py 
b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_network_service.py index 3f5e4ad96587..11622319f40d 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_network_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_network_service.py @@ -48,7 +48,7 @@ NetworkServiceClient, transports, ) -from google.ads.admanager_v1.types import network_service +from google.ads.admanager_v1.types import network_messages, network_service def client_cert_source_callback(): @@ -971,7 +971,7 @@ def test_get_network_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = network_service.Network( + return_value = network_messages.Network( name="name_value", display_name="display_name_value", network_code="network_code_value", @@ -988,7 +988,7 @@ def test_get_network_rest(request_type): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = network_service.Network.pb(return_value) + return_value = network_messages.Network.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") @@ -996,7 +996,7 @@ def test_get_network_rest(request_type): response = client.get_network(request) # Establish that the response is the type that we expect. - assert isinstance(response, network_service.Network) + assert isinstance(response, network_messages.Network) assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.network_code == "network_code_value" @@ -1085,7 +1085,7 @@ def test_get_network_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = network_service.Network() + return_value = network_messages.Network() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -1106,7 +1106,7 @@ def test_get_network_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = network_service.Network.pb(return_value) + return_value = network_messages.Network.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") @@ -1161,8 +1161,8 @@ def test_get_network_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = network_service.Network.to_json( - network_service.Network() + req.return_value._content = network_messages.Network.to_json( + network_messages.Network() ) request = network_service.GetNetworkRequest() @@ -1171,7 +1171,7 @@ def test_get_network_rest_interceptors(null_interceptor): ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = network_service.Network() + post.return_value = network_messages.Network() client.get_network( request, @@ -1218,7 +1218,7 @@ def test_get_network_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = network_service.Network() + return_value = network_messages.Network() # get arguments that satisfy an http rule for this method sample_request = {"name": "networks/sample1"} @@ -1233,7 +1233,7 @@ def test_get_network_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = network_service.Network.pb(return_value) + return_value = network_messages.Network.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1270,6 +1270,166 @@ def test_get_network_rest_error(): ) +@pytest.mark.parametrize( + "request_type", + [ + network_service.ListNetworksRequest, + dict, + ], +) +def test_list_networks_rest(request_type): + client = NetworkServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = network_service.ListNetworksResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = network_service.ListNetworksResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_networks(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, network_service.ListNetworksResponse) + + +def test_list_networks_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_networks in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_networks] = mock_rpc + + request = {} + client.list_networks(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_networks(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_networks_rest_interceptors(null_interceptor): + transport = transports.NetworkServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkServiceRestInterceptor(), + ) + client = NetworkServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.NetworkServiceRestInterceptor, "post_list_networks" + ) as post, mock.patch.object( + transports.NetworkServiceRestInterceptor, "pre_list_networks" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = network_service.ListNetworksRequest.pb( + network_service.ListNetworksRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = network_service.ListNetworksResponse.to_json( + network_service.ListNetworksResponse() + ) + + request = network_service.ListNetworksRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = network_service.ListNetworksResponse() + + client.list_networks( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_networks_rest_bad_request( + transport: str = "rest", request_type=network_service.ListNetworksRequest +): + client = NetworkServiceClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_networks(request) + + +def test_list_networks_rest_error(): + client = NetworkServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.NetworkServiceRestTransport( @@ -1381,6 +1541,7 @@ def test_network_service_base_transport(): # raise NotImplementedError. methods = ( "get_network", + "list_networks", "get_operation", ) for method in methods: @@ -1516,6 +1677,9 @@ def test_network_service_client_transport_session_collision(transport_name): session1 = client1.transport.get_network._session session2 = client2.transport.get_network._session assert session1 != session2 + session1 = client1.transport.list_networks._session + session2 = client2.transport.list_networks._session + assert session1 != session2 def test_ad_unit_path(): @@ -1697,7 +1861,7 @@ def test_get_operation_rest_bad_request( request = request_type() request = json_format.ParseDict( - {"name": "networks/sample1/operations/reports/exports/sample2"}, request + {"name": "networks/sample1/operations/reports/runs/sample2"}, request ) # Mock the http request call within the method and fake a BadRequest error. 
@@ -1724,7 +1888,7 @@ def test_get_operation_rest(request_type): credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {"name": "networks/sample1/operations/reports/exports/sample2"} + request_init = {"name": "networks/sample1/operations/reports/runs/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_order_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_order_service.py index e9bf7e0b618d..f3de0981207f 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_order_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_order_service.py @@ -50,7 +50,13 @@ pagers, transports, ) -from google.ads.admanager_v1.types import applied_label, order_service +from google.ads.admanager_v1.types import ( + applied_label, + custom_field_value, + order_enums, + order_messages, + order_service, +) def client_cert_source_callback(): @@ -956,7 +962,7 @@ def test_get_order_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = order_service.Order( + return_value = order_messages.Order( name="name_value", order_id=840, display_name="display_name_value", @@ -970,12 +976,13 @@ def test_get_order_rest(request_type): effective_teams=["effective_teams_value"], creator="creator_value", currency_code="currency_code_value", + unlimited_end_time=True, external_order_id=1802, archived=True, last_modified_by_app="last_modified_by_app_value", notes="notes_value", po_number="po_number_value", - status=order_service.Order.Status.DRAFT, + status=order_enums.OrderStatusEnum.OrderStatus.DRAFT, salesperson="salesperson_value", secondary_salespeople=["secondary_salespeople_value"], secondary_traffickers=["secondary_traffickers_value"], @@ -985,7 +992,7 @@ def test_get_order_rest(request_type): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = order_service.Order.pb(return_value) + return_value = order_messages.Order.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") @@ -993,7 +1000,7 @@ def test_get_order_rest(request_type): response = client.get_order(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, order_service.Order) + assert isinstance(response, order_messages.Order) assert response.name == "name_value" assert response.order_id == 840 assert response.display_name == "display_name_value" @@ -1007,12 +1014,13 @@ def test_get_order_rest(request_type): assert response.effective_teams == ["effective_teams_value"] assert response.creator == "creator_value" assert response.currency_code == "currency_code_value" + assert response.unlimited_end_time is True assert response.external_order_id == 1802 assert response.archived is True assert response.last_modified_by_app == "last_modified_by_app_value" assert response.notes == "notes_value" assert response.po_number == "po_number_value" - assert response.status == order_service.Order.Status.DRAFT + assert response.status == order_enums.OrderStatusEnum.OrderStatus.DRAFT assert response.salesperson == "salesperson_value" assert response.secondary_salespeople == ["secondary_salespeople_value"] assert response.secondary_traffickers == ["secondary_traffickers_value"] @@ -1092,7 +1100,7 @@ def test_get_order_rest_required_fields(request_type=order_service.GetOrderReque request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = order_service.Order() + return_value = order_messages.Order() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -1113,7 +1121,7 @@ def test_get_order_rest_required_fields(request_type=order_service.GetOrderReque response_value.status_code = 200 # Convert return value to protobuf type - return_value = order_service.Order.pb(return_value) + return_value = order_messages.Order.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") @@ -1166,7 +1174,7 @@ def test_get_order_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = order_service.Order.to_json(order_service.Order()) + req.return_value._content = order_messages.Order.to_json(order_messages.Order()) request = order_service.GetOrderRequest() metadata = [ @@ -1174,7 +1182,7 @@ def test_get_order_rest_interceptors(null_interceptor): ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = order_service.Order() + post.return_value = order_messages.Order() client.get_order( request, @@ -1221,7 +1229,7 @@ def test_get_order_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = order_service.Order() + return_value = order_messages.Order() # get arguments that satisfy an http rule for this method sample_request = {"name": "networks/sample1/orders/sample2"} @@ -1236,7 +1244,7 @@ def test_get_order_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = order_service.Order.pb(return_value) + return_value = order_messages.Order.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1603,9 +1611,9 @@ def test_list_orders_rest_pager(transport: str = "rest"): response = ( order_service.ListOrdersResponse( orders=[ - order_service.Order(), - order_service.Order(), - order_service.Order(), + order_messages.Order(), + order_messages.Order(), + order_messages.Order(), ], next_page_token="abc", ), @@ -1615,14 +1623,14 @@ def test_list_orders_rest_pager(transport: str = "rest"): ), order_service.ListOrdersResponse( orders=[ - order_service.Order(), + order_messages.Order(), ], next_page_token="ghi", ), order_service.ListOrdersResponse( orders=[ - order_service.Order(), - order_service.Order(), + order_messages.Order(), + order_messages.Order(), ], ), ) @@ -1643,7 +1651,7 @@ def test_list_orders_rest_pager(transport: str = "rest"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, order_service.Order) for i in results) + assert all(isinstance(i, order_messages.Order) for i in results) pages = list(client.list_orders(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): @@ -1948,9 +1956,32 @@ def test_parse_contact_path(): assert expected == actual -def test_label_path(): +def test_custom_field_path(): network_code = "winkle" - label = "nautilus" + custom_field = "nautilus" + expected = "networks/{network_code}/customFields/{custom_field}".format( + network_code=network_code, + 
custom_field=custom_field, + ) + actual = OrderServiceClient.custom_field_path(network_code, custom_field) + assert expected == actual + + +def test_parse_custom_field_path(): + expected = { + "network_code": "scallop", + "custom_field": "abalone", + } + path = OrderServiceClient.custom_field_path(**expected) + + # Check that the path construction is reversible. + actual = OrderServiceClient.parse_custom_field_path(path) + assert expected == actual + + +def test_label_path(): + network_code = "squid" + label = "clam" expected = "networks/{network_code}/labels/{label}".format( network_code=network_code, label=label, @@ -1961,8 +1992,8 @@ def test_label_path(): def test_parse_label_path(): expected = { - "network_code": "scallop", - "label": "abalone", + "network_code": "whelk", + "label": "octopus", } path = OrderServiceClient.label_path(**expected) @@ -1972,7 +2003,7 @@ def test_parse_label_path(): def test_network_path(): - network_code = "squid" + network_code = "oyster" expected = "networks/{network_code}".format( network_code=network_code, ) @@ -1982,7 +2013,7 @@ def test_network_path(): def test_parse_network_path(): expected = { - "network_code": "clam", + "network_code": "nudibranch", } path = OrderServiceClient.network_path(**expected) @@ -1992,8 +2023,8 @@ def test_parse_network_path(): def test_order_path(): - network_code = "whelk" - order = "octopus" + network_code = "cuttlefish" + order = "mussel" expected = "networks/{network_code}/orders/{order}".format( network_code=network_code, order=order, @@ -2004,8 +2035,8 @@ def test_order_path(): def test_parse_order_path(): expected = { - "network_code": "oyster", - "order": "nudibranch", + "network_code": "winkle", + "order": "nautilus", } path = OrderServiceClient.order_path(**expected) @@ -2015,8 +2046,8 @@ def test_parse_order_path(): def test_team_path(): - network_code = "cuttlefish" - team = "mussel" + network_code = "scallop" + team = "abalone" expected = 
"networks/{network_code}/teams/{team}".format( network_code=network_code, team=team, @@ -2027,8 +2058,8 @@ def test_team_path(): def test_parse_team_path(): expected = { - "network_code": "winkle", - "team": "nautilus", + "network_code": "squid", + "team": "clam", } path = OrderServiceClient.team_path(**expected) @@ -2038,8 +2069,8 @@ def test_parse_team_path(): def test_user_path(): - network_code = "scallop" - user = "abalone" + network_code = "whelk" + user = "octopus" expected = "networks/{network_code}/users/{user}".format( network_code=network_code, user=user, @@ -2050,8 +2081,8 @@ def test_user_path(): def test_parse_user_path(): expected = { - "network_code": "squid", - "user": "clam", + "network_code": "oyster", + "user": "nudibranch", } path = OrderServiceClient.user_path(**expected) @@ -2061,7 +2092,7 @@ def test_parse_user_path(): def test_common_billing_account_path(): - billing_account = "whelk" + billing_account = "cuttlefish" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -2071,7 +2102,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "octopus", + "billing_account": "mussel", } path = OrderServiceClient.common_billing_account_path(**expected) @@ -2081,7 +2112,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "oyster" + folder = "winkle" expected = "folders/{folder}".format( folder=folder, ) @@ -2091,7 +2122,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nudibranch", + "folder": "nautilus", } path = OrderServiceClient.common_folder_path(**expected) @@ -2101,7 +2132,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "cuttlefish" + organization = "scallop" expected = "organizations/{organization}".format( organization=organization, ) @@ -2111,7 +2142,7 @@ def test_common_organization_path(): def 
test_parse_common_organization_path(): expected = { - "organization": "mussel", + "organization": "abalone", } path = OrderServiceClient.common_organization_path(**expected) @@ -2121,7 +2152,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "winkle" + project = "squid" expected = "projects/{project}".format( project=project, ) @@ -2131,7 +2162,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "nautilus", + "project": "clam", } path = OrderServiceClient.common_project_path(**expected) @@ -2141,8 +2172,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "scallop" - location = "abalone" + project = "whelk" + location = "octopus" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -2153,8 +2184,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "squid", - "location": "clam", + "project": "oyster", + "location": "nudibranch", } path = OrderServiceClient.common_location_path(**expected) @@ -2196,7 +2227,7 @@ def test_get_operation_rest_bad_request( request = request_type() request = json_format.ParseDict( - {"name": "networks/sample1/operations/reports/exports/sample2"}, request + {"name": "networks/sample1/operations/reports/runs/sample2"}, request ) # Mock the http request call within the method and fake a BadRequest error. @@ -2223,7 +2254,7 @@ def test_get_operation_rest(request_type): credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {"name": "networks/sample1/operations/reports/exports/sample2"} + request_init = {"name": "networks/sample1/operations/reports/runs/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_placement_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_placement_service.py index bcfdc6a89ea0..96d00d2422b4 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_placement_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_placement_service.py @@ -50,7 +50,11 @@ pagers, transports, ) -from google.ads.admanager_v1.types import placement_enums, placement_service +from google.ads.admanager_v1.types import ( + placement_enums, + placement_messages, + placement_service, +) def client_cert_source_callback(): @@ -993,7 +997,7 @@ def test_get_placement_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = placement_service.Placement( + return_value = placement_messages.Placement( name="name_value", placement_id=1253, display_name="display_name_value", @@ -1007,7 +1011,7 @@ def test_get_placement_rest(request_type): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = placement_service.Placement.pb(return_value) + return_value = placement_messages.Placement.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") @@ -1015,7 +1019,7 @@ def test_get_placement_rest(request_type): response = client.get_placement(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, placement_service.Placement) + assert isinstance(response, placement_messages.Placement) assert response.name == "name_value" assert response.placement_id == 1253 assert response.display_name == "display_name_value" @@ -1101,7 +1105,7 @@ def test_get_placement_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = placement_service.Placement() + return_value = placement_messages.Placement() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -1122,7 +1126,7 @@ def test_get_placement_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = placement_service.Placement.pb(return_value) + return_value = placement_messages.Placement.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") @@ -1177,8 +1181,8 @@ def test_get_placement_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = placement_service.Placement.to_json( - placement_service.Placement() + req.return_value._content = placement_messages.Placement.to_json( + placement_messages.Placement() ) request = placement_service.GetPlacementRequest() @@ -1187,7 +1191,7 @@ def test_get_placement_rest_interceptors(null_interceptor): ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = placement_service.Placement() + post.return_value = placement_messages.Placement() client.get_placement( request, @@ -1234,7 +1238,7 @@ def test_get_placement_rest_flattened(): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = placement_service.Placement() + return_value = placement_messages.Placement() # get arguments that satisfy an http rule for this method sample_request = {"name": "networks/sample1/placements/sample2"} @@ -1249,7 +1253,7 @@ def test_get_placement_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = placement_service.Placement.pb(return_value) + return_value = placement_messages.Placement.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1618,9 +1622,9 @@ def test_list_placements_rest_pager(transport: str = "rest"): response = ( placement_service.ListPlacementsResponse( placements=[ - placement_service.Placement(), - placement_service.Placement(), - placement_service.Placement(), + placement_messages.Placement(), + placement_messages.Placement(), + placement_messages.Placement(), ], next_page_token="abc", ), @@ -1630,14 +1634,14 @@ def test_list_placements_rest_pager(transport: str = "rest"): ), placement_service.ListPlacementsResponse( placements=[ - placement_service.Placement(), + placement_messages.Placement(), ], next_page_token="ghi", ), placement_service.ListPlacementsResponse( placements=[ - placement_service.Placement(), - placement_service.Placement(), + placement_messages.Placement(), + placement_messages.Placement(), ], ), ) @@ -1660,7 +1664,7 @@ def test_list_placements_rest_pager(transport: str = "rest"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, placement_service.Placement) for i in results) + assert all(isinstance(i, placement_messages.Placement) for i in results) pages = list(client.list_placements(request=sample_request).pages) for page_, token in zip(pages, 
["abc", "def", "ghi", ""]): @@ -2121,7 +2125,7 @@ def test_get_operation_rest_bad_request( request = request_type() request = json_format.ParseDict( - {"name": "networks/sample1/operations/reports/exports/sample2"}, request + {"name": "networks/sample1/operations/reports/runs/sample2"}, request ) # Mock the http request call within the method and fake a BadRequest error. @@ -2148,7 +2152,7 @@ def test_get_operation_rest(request_type): credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {"name": "networks/sample1/operations/reports/exports/sample2"} + request_init = {"name": "networks/sample1/operations/reports/runs/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_report_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_report_service.py index 9429af4d5338..687f2f1961b5 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_report_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_report_service.py @@ -44,7 +44,12 @@ from google.auth.exceptions import MutualTLSChannelError from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import date_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore import grpc from grpc.experimental import aio from proto.marshal.rules import wrappers @@ -55,6 +60,7 @@ from google.ads.admanager_v1.services.report_service import ( ReportServiceClient, + pagers, transports, ) from google.ads.admanager_v1.types import 
report_service @@ -959,18 +965,1643 @@ def test_report_service_client_client_options_credentials_file( @pytest.mark.parametrize( "request_type", [ - report_service.ExportSavedReportRequest, + report_service.GetReportRequest, dict, ], ) -def test_export_saved_report_rest(request_type): +def test_get_report_rest(request_type): client = ReportServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"report": "networks/sample1/reports/sample2"} + request_init = {"name": "networks/sample1/reports/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = report_service.Report( + name="name_value", + report_id=968, + visibility=report_service.Report.Visibility.DRAFT, + display_name="display_name_value", + locale="locale_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = report_service.Report.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_report(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, report_service.Report) + assert response.name == "name_value" + assert response.report_id == 968 + assert response.visibility == report_service.Report.Visibility.DRAFT + assert response.display_name == "display_name_value" + assert response.locale == "locale_value" + + +def test_get_report_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_report in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_report] = mock_rpc + + request = {} + client.get_report(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_report(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_report_rest_required_fields(request_type=report_service.GetReportRequest): + transport_class = transports.ReportServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_report._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_report._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = report_service.Report() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = report_service.Report.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_report(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_report_rest_unset_required_fields(): + transport = transports.ReportServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_report._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_report_rest_interceptors(null_interceptor): + transport = transports.ReportServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ReportServiceRestInterceptor(), + ) + client = ReportServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ReportServiceRestInterceptor, "post_get_report" + ) as post, mock.patch.object( + transports.ReportServiceRestInterceptor, "pre_get_report" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = report_service.GetReportRequest.pb( + 
report_service.GetReportRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = report_service.Report.to_json( + report_service.Report() + ) + + request = report_service.GetReportRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = report_service.Report() + + client.get_report( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_report_rest_bad_request( + transport: str = "rest", request_type=report_service.GetReportRequest +): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "networks/sample1/reports/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_report(request) + + +def test_get_report_rest_flattened(): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = report_service.Report() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "networks/sample1/reports/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = report_service.Report.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_report(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=networks/*/reports/*}" % client.transport._host, args[1] + ) + + +def test_get_report_rest_flattened_error(transport: str = "rest"): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_report( + report_service.GetReportRequest(), + name="name_value", + ) + + +def test_get_report_rest_error(): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + report_service.ListReportsRequest, + dict, + ], +) +def test_list_reports_rest(request_type): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "networks/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = report_service.ListReportsResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = report_service.ListReportsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_reports(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListReportsPager) + assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 + + +def test_list_reports_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_reports in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_reports] = mock_rpc + + request = {} + client.list_reports(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_reports(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_reports_rest_required_fields( + request_type=report_service.ListReportsRequest, +): + transport_class = transports.ReportServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_reports._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_reports._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + "skip", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = report_service.ListReportsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = report_service.ListReportsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_reports(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_reports_rest_unset_required_fields(): + transport = transports.ReportServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_reports._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + "skip", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_reports_rest_interceptors(null_interceptor): + transport = transports.ReportServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ReportServiceRestInterceptor(), + ) + client = ReportServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + 
path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ReportServiceRestInterceptor, "post_list_reports" + ) as post, mock.patch.object( + transports.ReportServiceRestInterceptor, "pre_list_reports" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = report_service.ListReportsRequest.pb( + report_service.ListReportsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = report_service.ListReportsResponse.to_json( + report_service.ListReportsResponse() + ) + + request = report_service.ListReportsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = report_service.ListReportsResponse() + + client.list_reports( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_reports_rest_bad_request( + transport: str = "rest", request_type=report_service.ListReportsRequest +): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "networks/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_reports(request) + + +def test_list_reports_rest_flattened(): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = report_service.ListReportsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "networks/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = report_service.ListReportsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_reports(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=networks/*}/reports" % client.transport._host, args[1] + ) + + +def test_list_reports_rest_flattened_error(transport: str = "rest"): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_reports( + report_service.ListReportsRequest(), + parent="parent_value", + ) + + +def test_list_reports_rest_pager(transport: str = "rest"): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + report_service.ListReportsResponse( + reports=[ + report_service.Report(), + report_service.Report(), + report_service.Report(), + ], + next_page_token="abc", + ), + report_service.ListReportsResponse( + reports=[], + next_page_token="def", + ), + report_service.ListReportsResponse( + reports=[ + report_service.Report(), + ], + next_page_token="ghi", + ), + report_service.ListReportsResponse( + reports=[ + report_service.Report(), + report_service.Report(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + report_service.ListReportsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "networks/sample1"} + + pager = client.list_reports(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, report_service.Report) for i in results) + + pages = list(client.list_reports(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + 
[ + report_service.CreateReportRequest, + dict, + ], +) +def test_create_report_rest(request_type): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "networks/sample1"} + request_init["report"] = { + "name": "name_value", + "report_id": 968, + "visibility": 1, + "report_definition": { + "dimensions": [242], + "metrics": [61], + "filters": [ + { + "field_filter": { + "field": {"dimension": 242, "metric": 61}, + "operation": 1, + "values": [ + { + "int_value": 967, + "double_value": 0.12710000000000002, + "string_value": "string_value_value", + "bool_value": True, + "int_list_value": {"values": [657, 658]}, + "string_list_value": { + "values": ["values_value1", "values_value2"] + }, + "bytes_value": b"bytes_value_blob", + } + ], + "slice_": {"dimension": 242, "value": {}}, + "time_period_index": 1800, + "metric_value_type": 1, + }, + "not_filter": {}, + "and_filter": {"filters": {}}, + "or_filter": {}, + } + ], + "time_zone": "time_zone_value", + "currency_code": "currency_code_value", + "date_range": { + "fixed": { + "start_date": {"year": 433, "month": 550, "day": 318}, + "end_date": {}, + }, + "relative": 1, + }, + "comparison_date_range": {}, + "custom_dimension_key_ids": [2568, 2569], + "line_item_custom_field_ids": [2739, 2740], + "order_custom_field_ids": [2329, 2330], + "creative_custom_field_ids": [2640, 2641], + "report_type": 1, + "time_period_column": 1, + "flags": [{"filters": {}, "name": "name_value"}], + "sorts": [ + { + "field": {}, + "descending": True, + "slice_": {}, + "time_period_index": 1800, + "metric_value_type": 1, + } + ], + }, + "display_name": "display_name_value", + "update_time": {"seconds": 751, "nanos": 543}, + "create_time": {}, + "locale": "locale_value", + "schedule_options": { + "schedule": { + "weekly_schedule": {"weekly_scheduled_days": [1]}, + "monthly_schedule": {"monthly_scheduled_days": 
[2348, 2349]}, + "start_date": {}, + "end_date": {}, + "frequency": 1, + "start_time": { + "hours": 561, + "minutes": 773, + "seconds": 751, + "nanos": 543, + }, + }, + "delivery_condition": 1, + "flags": {}, + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = report_service.CreateReportRequest.meta.fields["report"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["report"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, 
"keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["report"][field])): + del request_init["report"][field][i][subfield] + else: + del request_init["report"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = report_service.Report( + name="name_value", + report_id=968, + visibility=report_service.Report.Visibility.DRAFT, + display_name="display_name_value", + locale="locale_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = report_service.Report.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_report(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, report_service.Report) + assert response.name == "name_value" + assert response.report_id == 968 + assert response.visibility == report_service.Report.Visibility.DRAFT + assert response.display_name == "display_name_value" + assert response.locale == "locale_value" + + +def test_create_report_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_report in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_report] = mock_rpc + + request = {} + client.create_report(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_report(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_report_rest_required_fields( + request_type=report_service.CreateReportRequest, +): + transport_class = transports.ReportServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_report._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_report._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = report_service.Report() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = report_service.Report.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_report(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_report_rest_unset_required_fields(): + transport = transports.ReportServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_report._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "report", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_report_rest_interceptors(null_interceptor): + transport = transports.ReportServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ReportServiceRestInterceptor(), + ) + client = ReportServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ReportServiceRestInterceptor, "post_create_report" + ) as post, mock.patch.object( + transports.ReportServiceRestInterceptor, "pre_create_report" + ) as pre: + 
pre.assert_not_called() + post.assert_not_called() + pb_message = report_service.CreateReportRequest.pb( + report_service.CreateReportRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = report_service.Report.to_json( + report_service.Report() + ) + + request = report_service.CreateReportRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = report_service.Report() + + client.create_report( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_report_rest_bad_request( + transport: str = "rest", request_type=report_service.CreateReportRequest +): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "networks/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_report(request) + + +def test_create_report_rest_flattened(): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = report_service.Report() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "networks/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + report=report_service.Report(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = report_service.Report.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_report(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=networks/*}/reports" % client.transport._host, args[1] + ) + + +def test_create_report_rest_flattened_error(transport: str = "rest"): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_report( + report_service.CreateReportRequest(), + parent="parent_value", + report=report_service.Report(name="name_value"), + ) + + +def test_create_report_rest_error(): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + report_service.UpdateReportRequest, + dict, + ], +) +def test_update_report_rest(request_type): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"report": {"name": "networks/sample1/reports/sample2"}} + request_init["report"] = { + "name": "networks/sample1/reports/sample2", + "report_id": 968, + "visibility": 1, + "report_definition": { + "dimensions": [242], + "metrics": [61], + "filters": [ + { + "field_filter": { + "field": {"dimension": 242, "metric": 61}, + "operation": 1, + "values": [ + { + "int_value": 967, + "double_value": 0.12710000000000002, + "string_value": "string_value_value", + "bool_value": True, + "int_list_value": {"values": [657, 658]}, + "string_list_value": { + "values": ["values_value1", "values_value2"] + }, + "bytes_value": b"bytes_value_blob", + } + ], + "slice_": {"dimension": 242, "value": {}}, + "time_period_index": 1800, + "metric_value_type": 1, + }, + "not_filter": {}, + "and_filter": {"filters": {}}, + "or_filter": {}, + } + ], + "time_zone": "time_zone_value", + "currency_code": "currency_code_value", + "date_range": { + "fixed": { + "start_date": {"year": 433, "month": 550, "day": 318}, + "end_date": {}, + }, + "relative": 1, + }, + "comparison_date_range": {}, + "custom_dimension_key_ids": [2568, 2569], + "line_item_custom_field_ids": [2739, 2740], + "order_custom_field_ids": [2329, 2330], + "creative_custom_field_ids": [2640, 2641], + "report_type": 1, + "time_period_column": 1, + "flags": [{"filters": {}, "name": 
"name_value"}], + "sorts": [ + { + "field": {}, + "descending": True, + "slice_": {}, + "time_period_index": 1800, + "metric_value_type": 1, + } + ], + }, + "display_name": "display_name_value", + "update_time": {"seconds": 751, "nanos": 543}, + "create_time": {}, + "locale": "locale_value", + "schedule_options": { + "schedule": { + "weekly_schedule": {"weekly_scheduled_days": [1]}, + "monthly_schedule": {"monthly_scheduled_days": [2348, 2349]}, + "start_date": {}, + "end_date": {}, + "frequency": 1, + "start_time": { + "hours": 561, + "minutes": 773, + "seconds": 751, + "nanos": 543, + }, + }, + "delivery_condition": 1, + "flags": {}, + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = report_service.UpdateReportRequest.meta.fields["report"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["report"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["report"][field])): + del request_init["report"][field][i][subfield] + else: + del 
request_init["report"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = report_service.Report( + name="name_value", + report_id=968, + visibility=report_service.Report.Visibility.DRAFT, + display_name="display_name_value", + locale="locale_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = report_service.Report.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_report(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, report_service.Report) + assert response.name == "name_value" + assert response.report_id == 968 + assert response.visibility == report_service.Report.Visibility.DRAFT + assert response.display_name == "display_name_value" + assert response.locale == "locale_value" + + +def test_update_report_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_report in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # 
operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_report] = mock_rpc + + request = {} + client.update_report(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_report(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_report_rest_required_fields( + request_type=report_service.UpdateReportRequest, +): + transport_class = transports.ReportServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_report._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_report._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = report_service.Report() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = report_service.Report.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_report(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_report_rest_unset_required_fields(): + transport = transports.ReportServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_report._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "report", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_report_rest_interceptors(null_interceptor): + transport = transports.ReportServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ReportServiceRestInterceptor(), + ) + client = ReportServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + 
path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ReportServiceRestInterceptor, "post_update_report" + ) as post, mock.patch.object( + transports.ReportServiceRestInterceptor, "pre_update_report" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = report_service.UpdateReportRequest.pb( + report_service.UpdateReportRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = report_service.Report.to_json( + report_service.Report() + ) + + request = report_service.UpdateReportRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = report_service.Report() + + client.update_report( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_report_rest_bad_request( + transport: str = "rest", request_type=report_service.UpdateReportRequest +): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"report": {"name": "networks/sample1/reports/sample2"}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_report(request) + + +def test_update_report_rest_flattened(): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = report_service.Report() + + # get arguments that satisfy an http rule for this method + sample_request = {"report": {"name": "networks/sample1/reports/sample2"}} + + # get truthy value for each flattened field + mock_args = dict( + report=report_service.Report(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = report_service.Report.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_report(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{report.name=networks/*/reports/*}" % client.transport._host, args[1] + ) + + +def test_update_report_rest_flattened_error(transport: str = "rest"): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_report( + report_service.UpdateReportRequest(), + report=report_service.Report(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_report_rest_error(): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + report_service.RunReportRequest, + dict, + ], +) +def test_run_report_rest(request_type): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "networks/sample1/reports/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -985,13 +2616,13 @@ def test_export_saved_report_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.export_saved_report(request) + response = client.run_report(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_export_saved_report_rest_use_cached_wrapped_rpc(): +def test_run_report_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -1005,21 +2636,17 @@ def test_export_saved_report_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.export_saved_report in client._transport._wrapped_methods - ) + assert client._transport.run_report in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.export_saved_report - ] = mock_rpc + client._transport._wrapped_methods[client._transport.run_report] = mock_rpc request = {} - client.export_saved_report(request) + client.run_report(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -1028,19 +2655,18 @@ def test_export_saved_report_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.export_saved_report(request) + client.run_report(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_export_saved_report_rest_required_fields( - request_type=report_service.ExportSavedReportRequest, -): +def test_run_report_rest_required_fields(request_type=report_service.RunReportRequest): transport_class = transports.ReportServiceRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -1051,17 +2677,21 @@ def test_export_saved_report_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).export_saved_report._get_unset_required_fields(jsonified_request) + ).run_report._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).export_saved_report._get_unset_required_fields(jsonified_request) + ).run_report._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ReportServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1095,24 +2725,24 @@ def test_export_saved_report_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.export_saved_report(request) + response = client.run_report(request) expected_params = [("$alt", 
"json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_export_saved_report_rest_unset_required_fields(): +def test_run_report_rest_unset_required_fields(): transport = transports.ReportServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.export_saved_report._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("format",))) + unset_fields = transport.run_report._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_export_saved_report_rest_interceptors(null_interceptor): +def test_run_report_rest_interceptors(null_interceptor): transport = transports.ReportServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -1127,14 +2757,14 @@ def test_export_saved_report_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.ReportServiceRestInterceptor, "post_export_saved_report" + transports.ReportServiceRestInterceptor, "post_run_report" ) as post, mock.patch.object( - transports.ReportServiceRestInterceptor, "pre_export_saved_report" + transports.ReportServiceRestInterceptor, "pre_run_report" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = report_service.ExportSavedReportRequest.pb( - report_service.ExportSavedReportRequest() + pb_message = report_service.RunReportRequest.pb( + report_service.RunReportRequest() ) transcode.return_value = { "method": "post", @@ -1150,7 +2780,7 @@ def test_export_saved_report_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = report_service.ExportSavedReportRequest() + request = report_service.RunReportRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -1158,7 +2788,7 @@ def 
test_export_saved_report_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.export_saved_report( + client.run_report( request, metadata=[ ("key", "val"), @@ -1170,8 +2800,8 @@ def test_export_saved_report_rest_interceptors(null_interceptor): post.assert_called_once() -def test_export_saved_report_rest_bad_request( - transport: str = "rest", request_type=report_service.ExportSavedReportRequest +def test_run_report_rest_bad_request( + transport: str = "rest", request_type=report_service.RunReportRequest ): client = ReportServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1179,7 +2809,7 @@ def test_export_saved_report_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"report": "networks/sample1/reports/sample2"} + request_init = {"name": "networks/sample1/reports/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -1191,10 +2821,10 @@ def test_export_saved_report_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.export_saved_report(request) + client.run_report(request) -def test_export_saved_report_rest_flattened(): +def test_run_report_rest_flattened(): client = ReportServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -1206,35 +2836,262 @@ def test_export_saved_report_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"report": "networks/sample1/reports/sample2"} + sample_request = {"name": "networks/sample1/reports/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.run_report(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=networks/*/reports/*}:run" % client.transport._host, args[1] + ) + + +def test_run_report_rest_flattened_error(transport: str = "rest"): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.run_report( + report_service.RunReportRequest(), + name="name_value", + ) + + +def test_run_report_rest_error(): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + report_service.FetchReportResultRowsRequest, + dict, + ], +) +def test_fetch_report_result_rows_rest(request_type): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "networks/sample1/reports/sample2/results/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = report_service.FetchReportResultRowsResponse( + total_row_count=1635, + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = report_service.FetchReportResultRowsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.fetch_report_result_rows(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.FetchReportResultRowsPager) + assert response.total_row_count == 1635 + assert response.next_page_token == "next_page_token_value" + + +def test_fetch_report_result_rows_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.fetch_report_result_rows + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.fetch_report_result_rows + ] = mock_rpc + + request = {} + client.fetch_report_result_rows(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.fetch_report_result_rows(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_fetch_report_result_rows_rest_interceptors(null_interceptor): + transport = transports.ReportServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ReportServiceRestInterceptor(), + ) + client = ReportServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ReportServiceRestInterceptor, "post_fetch_report_result_rows" + ) as post, mock.patch.object( + transports.ReportServiceRestInterceptor, "pre_fetch_report_result_rows" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = report_service.FetchReportResultRowsRequest.pb( + report_service.FetchReportResultRowsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + report_service.FetchReportResultRowsResponse.to_json( + report_service.FetchReportResultRowsResponse() + ) + ) + + request = report_service.FetchReportResultRowsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = report_service.FetchReportResultRowsResponse() + + client.fetch_report_result_rows( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_fetch_report_result_rows_rest_bad_request( + transport: str = 
"rest", request_type=report_service.FetchReportResultRowsRequest +): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "networks/sample1/reports/sample2/results/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.fetch_report_result_rows(request) + + +def test_fetch_report_result_rows_rest_flattened(): + client = ReportServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = report_service.FetchReportResultRowsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "networks/sample1/reports/sample2/results/sample3"} # get truthy value for each flattened field mock_args = dict( - report="report_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = report_service.FetchReportResultRowsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.export_saved_report(**mock_args) + client.fetch_report_result_rows(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{report=networks/*/reports/*}:exportSavedReport" + "%s/v1/{name=networks/*/reports/*/results/*}:fetchRows" % client.transport._host, args[1], ) -def test_export_saved_report_rest_flattened_error(transport: str = "rest"): +def test_fetch_report_result_rows_rest_flattened_error(transport: str = "rest"): client = ReportServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1243,17 +3100,74 @@ def test_export_saved_report_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.export_saved_report( - report_service.ExportSavedReportRequest(), - report="report_value", + client.fetch_report_result_rows( + report_service.FetchReportResultRowsRequest(), + name="name_value", ) -def test_export_saved_report_rest_error(): +def test_fetch_report_result_rows_rest_pager(transport: str = "rest"): client = ReportServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + report_service.FetchReportResultRowsResponse( + rows=[ + report_service.Report.DataTable.Row(), + report_service.Report.DataTable.Row(), + report_service.Report.DataTable.Row(), + ], + next_page_token="abc", + ), + report_service.FetchReportResultRowsResponse( + rows=[], + next_page_token="def", + ), + report_service.FetchReportResultRowsResponse( + rows=[ + report_service.Report.DataTable.Row(), + ], + next_page_token="ghi", + ), + report_service.FetchReportResultRowsResponse( + rows=[ + report_service.Report.DataTable.Row(), + report_service.Report.DataTable.Row(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + report_service.FetchReportResultRowsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"name": "networks/sample1/reports/sample2/results/sample3"} + + pager = 
client.fetch_report_result_rows(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, report_service.Report.DataTable.Row) for i in results) + + pages = list(client.fetch_report_result_rows(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. @@ -1365,7 +3279,12 @@ def test_report_service_base_transport(): # Every method on the transport should just blindly # raise NotImplementedError. methods = ( - "export_saved_report", + "get_report", + "list_reports", + "create_report", + "update_report", + "run_report", + "fetch_report_result_rows", "get_operation", ) for method in methods: @@ -1520,14 +3439,49 @@ def test_report_service_client_transport_session_collision(transport_name): credentials=creds2, transport=transport_name, ) - session1 = client1.transport.export_saved_report._session - session2 = client2.transport.export_saved_report._session + session1 = client1.transport.get_report._session + session2 = client2.transport.get_report._session + assert session1 != session2 + session1 = client1.transport.list_reports._session + session2 = client2.transport.list_reports._session + assert session1 != session2 + session1 = client1.transport.create_report._session + session2 = client2.transport.create_report._session + assert session1 != session2 + session1 = client1.transport.update_report._session + session2 = client2.transport.update_report._session + assert session1 != session2 + session1 = client1.transport.run_report._session + session2 = client2.transport.run_report._session + assert session1 != session2 + session1 = client1.transport.fetch_report_result_rows._session + session2 = client2.transport.fetch_report_result_rows._session assert session1 != session2 -def test_report_path(): +def test_network_path(): network_code = 
"squid" - report = "clam" + expected = "networks/{network_code}".format( + network_code=network_code, + ) + actual = ReportServiceClient.network_path(network_code) + assert expected == actual + + +def test_parse_network_path(): + expected = { + "network_code": "clam", + } + path = ReportServiceClient.network_path(**expected) + + # Check that the path construction is reversible. + actual = ReportServiceClient.parse_network_path(path) + assert expected == actual + + +def test_report_path(): + network_code = "whelk" + report = "octopus" expected = "networks/{network_code}/reports/{report}".format( network_code=network_code, report=report, @@ -1538,8 +3492,8 @@ def test_report_path(): def test_parse_report_path(): expected = { - "network_code": "whelk", - "report": "octopus", + "network_code": "oyster", + "report": "nudibranch", } path = ReportServiceClient.report_path(**expected) @@ -1549,7 +3503,7 @@ def test_parse_report_path(): def test_common_billing_account_path(): - billing_account = "oyster" + billing_account = "cuttlefish" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -1559,7 +3513,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "nudibranch", + "billing_account": "mussel", } path = ReportServiceClient.common_billing_account_path(**expected) @@ -1569,7 +3523,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "cuttlefish" + folder = "winkle" expected = "folders/{folder}".format( folder=folder, ) @@ -1579,7 +3533,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "mussel", + "folder": "nautilus", } path = ReportServiceClient.common_folder_path(**expected) @@ -1589,7 +3543,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "winkle" + organization = "scallop" expected = "organizations/{organization}".format( 
organization=organization, ) @@ -1599,7 +3553,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "nautilus", + "organization": "abalone", } path = ReportServiceClient.common_organization_path(**expected) @@ -1609,7 +3563,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "scallop" + project = "squid" expected = "projects/{project}".format( project=project, ) @@ -1619,7 +3573,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "abalone", + "project": "clam", } path = ReportServiceClient.common_project_path(**expected) @@ -1629,8 +3583,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "squid" - location = "clam" + project = "whelk" + location = "octopus" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -1641,8 +3595,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "whelk", - "location": "octopus", + "project": "oyster", + "location": "nudibranch", } path = ReportServiceClient.common_location_path(**expected) @@ -1684,7 +3638,7 @@ def test_get_operation_rest_bad_request( request = request_type() request = json_format.ParseDict( - {"name": "networks/sample1/operations/reports/exports/sample2"}, request + {"name": "networks/sample1/operations/reports/runs/sample2"}, request ) # Mock the http request call within the method and fake a BadRequest error. @@ -1711,7 +3665,7 @@ def test_get_operation_rest(request_type): credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {"name": "networks/sample1/operations/reports/exports/sample2"} + request_init = {"name": "networks/sample1/operations/reports/runs/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_role_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_role_service.py index 5e255d3ce5b0..6f32f53c1607 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_role_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_role_service.py @@ -49,7 +49,7 @@ pagers, transports, ) -from google.ads.admanager_v1.types import role_service +from google.ads.admanager_v1.types import role_enums, role_messages, role_service def client_cert_source_callback(): @@ -951,15 +951,20 @@ def test_get_role_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = role_service.Role( + return_value = role_messages.Role( name="name_value", + role_id=734, + display_name="display_name_value", + description="description_value", + built_in=True, + status=role_enums.RoleStatusEnum.RoleStatus.ACTIVE, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = role_service.Role.pb(return_value) + return_value = role_messages.Role.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") @@ -967,8 +972,13 @@ def test_get_role_rest(request_type): response = client.get_role(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, role_service.Role) + assert isinstance(response, role_messages.Role) assert response.name == "name_value" + assert response.role_id == 734 + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.built_in is True + assert response.status == role_enums.RoleStatusEnum.RoleStatus.ACTIVE def test_get_role_rest_use_cached_wrapped_rpc(): @@ -1045,7 +1055,7 @@ def test_get_role_rest_required_fields(request_type=role_service.GetRoleRequest) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = role_service.Role() + return_value = role_messages.Role() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -1066,7 +1076,7 @@ def test_get_role_rest_required_fields(request_type=role_service.GetRoleRequest) response_value.status_code = 200 # Convert return value to protobuf type - return_value = role_service.Role.pb(return_value) + return_value = role_messages.Role.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") @@ -1119,7 +1129,7 @@ def test_get_role_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = role_service.Role.to_json(role_service.Role()) + req.return_value._content = role_messages.Role.to_json(role_messages.Role()) request = role_service.GetRoleRequest() metadata = [ @@ -1127,7 +1137,7 @@ def test_get_role_rest_interceptors(null_interceptor): ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = role_service.Role() + post.return_value = role_messages.Role() client.get_role( request, @@ -1174,7 +1184,7 @@ def test_get_role_rest_flattened(): 
# Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = role_service.Role() + return_value = role_messages.Role() # get arguments that satisfy an http rule for this method sample_request = {"name": "networks/sample1/roles/sample2"} @@ -1189,7 +1199,7 @@ def test_get_role_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = role_service.Role.pb(return_value) + return_value = role_messages.Role.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1554,9 +1564,9 @@ def test_list_roles_rest_pager(transport: str = "rest"): response = ( role_service.ListRolesResponse( roles=[ - role_service.Role(), - role_service.Role(), - role_service.Role(), + role_messages.Role(), + role_messages.Role(), + role_messages.Role(), ], next_page_token="abc", ), @@ -1566,14 +1576,14 @@ def test_list_roles_rest_pager(transport: str = "rest"): ), role_service.ListRolesResponse( roles=[ - role_service.Role(), + role_messages.Role(), ], next_page_token="ghi", ), role_service.ListRolesResponse( roles=[ - role_service.Role(), - role_service.Role(), + role_messages.Role(), + role_messages.Role(), ], ), ) @@ -1594,7 +1604,7 @@ def test_list_roles_rest_pager(transport: str = "rest"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, role_service.Role) for i in results) + assert all(isinstance(i, role_messages.Role) for i in results) pages = list(client.list_roles(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): @@ -2032,7 +2042,7 @@ def test_get_operation_rest_bad_request( request = request_type() request = json_format.ParseDict( - {"name": 
"networks/sample1/operations/reports/exports/sample2"}, request + {"name": "networks/sample1/operations/reports/runs/sample2"}, request ) # Mock the http request call within the method and fake a BadRequest error. @@ -2059,7 +2069,7 @@ def test_get_operation_rest(request_type): credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {"name": "networks/sample1/operations/reports/exports/sample2"} + request_init = {"name": "networks/sample1/operations/reports/runs/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_ad_partner_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_taxonomy_category_service.py similarity index 75% rename from packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_ad_partner_service.py rename to packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_taxonomy_category_service.py index 395841658712..c6f333dc2ed3 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_ad_partner_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_taxonomy_category_service.py @@ -44,12 +44,16 @@ from requests import PreparedRequest, Request, Response from requests.sessions import Session -from google.ads.admanager_v1.services.ad_partner_service import ( - AdPartnerServiceClient, +from google.ads.admanager_v1.services.taxonomy_category_service import ( + TaxonomyCategoryServiceClient, pagers, transports, ) -from google.ads.admanager_v1.types import ad_partner_service +from google.ads.admanager_v1.types import ( + taxonomy_category_messages, + taxonomy_category_service, + taxonomy_type_enum, +) def client_cert_source_callback(): @@ -85,41 +89,45 @@ def test__get_default_mtls_endpoint(): sandbox_mtls_endpoint = 
"example.mtls.sandbox.googleapis.com" non_googleapi = "api.example.com" - assert AdPartnerServiceClient._get_default_mtls_endpoint(None) is None + assert TaxonomyCategoryServiceClient._get_default_mtls_endpoint(None) is None assert ( - AdPartnerServiceClient._get_default_mtls_endpoint(api_endpoint) + TaxonomyCategoryServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint ) assert ( - AdPartnerServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + TaxonomyCategoryServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint ) assert ( - AdPartnerServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + TaxonomyCategoryServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint ) assert ( - AdPartnerServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + TaxonomyCategoryServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint ) assert ( - AdPartnerServiceClient._get_default_mtls_endpoint(non_googleapi) + TaxonomyCategoryServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi ) def test__read_environment_variables(): - assert AdPartnerServiceClient._read_environment_variables() == (False, "auto", None) + assert TaxonomyCategoryServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert AdPartnerServiceClient._read_environment_variables() == ( + assert TaxonomyCategoryServiceClient._read_environment_variables() == ( True, "auto", None, ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert AdPartnerServiceClient._read_environment_variables() == ( + assert TaxonomyCategoryServiceClient._read_environment_variables() == ( False, "auto", None, @@ -129,28 +137,28 @@ def test__read_environment_variables(): os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): with 
pytest.raises(ValueError) as excinfo: - AdPartnerServiceClient._read_environment_variables() + TaxonomyCategoryServiceClient._read_environment_variables() assert ( str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert AdPartnerServiceClient._read_environment_variables() == ( + assert TaxonomyCategoryServiceClient._read_environment_variables() == ( False, "never", None, ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert AdPartnerServiceClient._read_environment_variables() == ( + assert TaxonomyCategoryServiceClient._read_environment_variables() == ( False, "always", None, ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert AdPartnerServiceClient._read_environment_variables() == ( + assert TaxonomyCategoryServiceClient._read_environment_variables() == ( False, "auto", None, @@ -158,14 +166,14 @@ def test__read_environment_variables(): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: - AdPartnerServiceClient._read_environment_variables() + TaxonomyCategoryServiceClient._read_environment_variables() assert ( str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" ) with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert AdPartnerServiceClient._read_environment_variables() == ( + assert TaxonomyCategoryServiceClient._read_environment_variables() == ( False, "auto", "foo.com", @@ -176,13 +184,17 @@ def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() - assert AdPartnerServiceClient._get_client_cert_source(None, False) is None + assert TaxonomyCategoryServiceClient._get_client_cert_source(None, False) is None assert ( - 
AdPartnerServiceClient._get_client_cert_source(mock_provided_cert_source, False) + TaxonomyCategoryServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) is None ) assert ( - AdPartnerServiceClient._get_client_cert_source(mock_provided_cert_source, True) + TaxonomyCategoryServiceClient._get_client_cert_source( + mock_provided_cert_source, True + ) == mock_provided_cert_source ) @@ -194,11 +206,11 @@ def test__get_client_cert_source(): return_value=mock_default_cert_source, ): assert ( - AdPartnerServiceClient._get_client_cert_source(None, True) + TaxonomyCategoryServiceClient._get_client_cert_source(None, True) is mock_default_cert_source ) assert ( - AdPartnerServiceClient._get_client_cert_source( + TaxonomyCategoryServiceClient._get_client_cert_source( mock_provided_cert_source, "true" ) is mock_provided_cert_source @@ -206,59 +218,67 @@ def test__get_client_cert_source(): @mock.patch.object( - AdPartnerServiceClient, + TaxonomyCategoryServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(AdPartnerServiceClient), + modify_default_endpoint_template(TaxonomyCategoryServiceClient), ) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() - default_universe = AdPartnerServiceClient._DEFAULT_UNIVERSE - default_endpoint = AdPartnerServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + default_universe = TaxonomyCategoryServiceClient._DEFAULT_UNIVERSE + default_endpoint = TaxonomyCategoryServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( UNIVERSE_DOMAIN=default_universe ) mock_universe = "bar.com" - mock_endpoint = AdPartnerServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + mock_endpoint = TaxonomyCategoryServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( UNIVERSE_DOMAIN=mock_universe ) assert ( - AdPartnerServiceClient._get_api_endpoint( + TaxonomyCategoryServiceClient._get_api_endpoint( api_override, mock_client_cert_source, default_universe, "always" ) == api_override ) assert ( - 
AdPartnerServiceClient._get_api_endpoint( + TaxonomyCategoryServiceClient._get_api_endpoint( None, mock_client_cert_source, default_universe, "auto" ) - == AdPartnerServiceClient.DEFAULT_MTLS_ENDPOINT + == TaxonomyCategoryServiceClient.DEFAULT_MTLS_ENDPOINT ) assert ( - AdPartnerServiceClient._get_api_endpoint(None, None, default_universe, "auto") + TaxonomyCategoryServiceClient._get_api_endpoint( + None, None, default_universe, "auto" + ) == default_endpoint ) assert ( - AdPartnerServiceClient._get_api_endpoint(None, None, default_universe, "always") - == AdPartnerServiceClient.DEFAULT_MTLS_ENDPOINT + TaxonomyCategoryServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == TaxonomyCategoryServiceClient.DEFAULT_MTLS_ENDPOINT ) assert ( - AdPartnerServiceClient._get_api_endpoint( + TaxonomyCategoryServiceClient._get_api_endpoint( None, mock_client_cert_source, default_universe, "always" ) - == AdPartnerServiceClient.DEFAULT_MTLS_ENDPOINT + == TaxonomyCategoryServiceClient.DEFAULT_MTLS_ENDPOINT ) assert ( - AdPartnerServiceClient._get_api_endpoint(None, None, mock_universe, "never") + TaxonomyCategoryServiceClient._get_api_endpoint( + None, None, mock_universe, "never" + ) == mock_endpoint ) assert ( - AdPartnerServiceClient._get_api_endpoint(None, None, default_universe, "never") + TaxonomyCategoryServiceClient._get_api_endpoint( + None, None, default_universe, "never" + ) == default_endpoint ) with pytest.raises(MutualTLSChannelError) as excinfo: - AdPartnerServiceClient._get_api_endpoint( + TaxonomyCategoryServiceClient._get_api_endpoint( None, mock_client_cert_source, mock_universe, "auto" ) assert ( @@ -272,29 +292,33 @@ def test__get_universe_domain(): universe_domain_env = "bar.com" assert ( - AdPartnerServiceClient._get_universe_domain( + TaxonomyCategoryServiceClient._get_universe_domain( client_universe_domain, universe_domain_env ) == client_universe_domain ) assert ( - AdPartnerServiceClient._get_universe_domain(None, 
universe_domain_env) + TaxonomyCategoryServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env ) assert ( - AdPartnerServiceClient._get_universe_domain(None, None) - == AdPartnerServiceClient._DEFAULT_UNIVERSE + TaxonomyCategoryServiceClient._get_universe_domain(None, None) + == TaxonomyCategoryServiceClient._DEFAULT_UNIVERSE ) with pytest.raises(ValueError) as excinfo: - AdPartnerServiceClient._get_universe_domain("", None) + TaxonomyCategoryServiceClient._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (AdPartnerServiceClient, transports.AdPartnerServiceRestTransport, "rest"), + ( + TaxonomyCategoryServiceClient, + transports.TaxonomyCategoryServiceRestTransport, + "rest", + ), ], ) def test__validate_universe_domain(client_class, transport_class, transport_name): @@ -373,10 +397,10 @@ def test__validate_universe_domain(client_class, transport_class, transport_name @pytest.mark.parametrize( "client_class,transport_name", [ - (AdPartnerServiceClient, "rest"), + (TaxonomyCategoryServiceClient, "rest"), ], ) -def test_ad_partner_service_client_from_service_account_info( +def test_taxonomy_category_service_client_from_service_account_info( client_class, transport_name ): creds = ga_credentials.AnonymousCredentials() @@ -399,10 +423,10 @@ def test_ad_partner_service_client_from_service_account_info( @pytest.mark.parametrize( "transport_class,transport_name", [ - (transports.AdPartnerServiceRestTransport, "rest"), + (transports.TaxonomyCategoryServiceRestTransport, "rest"), ], ) -def test_ad_partner_service_client_service_account_always_use_jwt( +def test_taxonomy_category_service_client_service_account_always_use_jwt( transport_class, transport_name ): with mock.patch.object( @@ -423,10 +447,10 @@ def test_ad_partner_service_client_service_account_always_use_jwt( @pytest.mark.parametrize( 
"client_class,transport_name", [ - (AdPartnerServiceClient, "rest"), + (TaxonomyCategoryServiceClient, "rest"), ], ) -def test_ad_partner_service_client_from_service_account_file( +def test_taxonomy_category_service_client_from_service_account_file( client_class, transport_name ): creds = ga_credentials.AnonymousCredentials() @@ -453,39 +477,43 @@ def test_ad_partner_service_client_from_service_account_file( ) -def test_ad_partner_service_client_get_transport_class(): - transport = AdPartnerServiceClient.get_transport_class() +def test_taxonomy_category_service_client_get_transport_class(): + transport = TaxonomyCategoryServiceClient.get_transport_class() available_transports = [ - transports.AdPartnerServiceRestTransport, + transports.TaxonomyCategoryServiceRestTransport, ] assert transport in available_transports - transport = AdPartnerServiceClient.get_transport_class("rest") - assert transport == transports.AdPartnerServiceRestTransport + transport = TaxonomyCategoryServiceClient.get_transport_class("rest") + assert transport == transports.TaxonomyCategoryServiceRestTransport @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (AdPartnerServiceClient, transports.AdPartnerServiceRestTransport, "rest"), + ( + TaxonomyCategoryServiceClient, + transports.TaxonomyCategoryServiceRestTransport, + "rest", + ), ], ) @mock.patch.object( - AdPartnerServiceClient, + TaxonomyCategoryServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(AdPartnerServiceClient), + modify_default_endpoint_template(TaxonomyCategoryServiceClient), ) -def test_ad_partner_service_client_client_options( +def test_taxonomy_category_service_client_client_options( client_class, transport_class, transport_name ): # Check that if channel is provided we won't create a new one. 
- with mock.patch.object(AdPartnerServiceClient, "get_transport_class") as gtc: + with mock.patch.object(TaxonomyCategoryServiceClient, "get_transport_class") as gtc: transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() # Check that if channel is provided via str we will create a new one. - with mock.patch.object(AdPartnerServiceClient, "get_transport_class") as gtc: + with mock.patch.object(TaxonomyCategoryServiceClient, "get_transport_class") as gtc: client = client_class(transport=transport_name) gtc.assert_called() @@ -609,26 +637,26 @@ def test_ad_partner_service_client_client_options( "client_class,transport_class,transport_name,use_client_cert_env", [ ( - AdPartnerServiceClient, - transports.AdPartnerServiceRestTransport, + TaxonomyCategoryServiceClient, + transports.TaxonomyCategoryServiceRestTransport, "rest", "true", ), ( - AdPartnerServiceClient, - transports.AdPartnerServiceRestTransport, + TaxonomyCategoryServiceClient, + transports.TaxonomyCategoryServiceRestTransport, "rest", "false", ), ], ) @mock.patch.object( - AdPartnerServiceClient, + TaxonomyCategoryServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(AdPartnerServiceClient), + modify_default_endpoint_template(TaxonomyCategoryServiceClient), ) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_ad_partner_service_client_mtls_env_auto( +def test_taxonomy_category_service_client_mtls_env_auto( client_class, transport_class, transport_name, use_client_cert_env ): # This tests the endpoint autoswitch behavior. 
Endpoint is autoswitched to the default @@ -730,13 +758,15 @@ def test_ad_partner_service_client_mtls_env_auto( ) -@pytest.mark.parametrize("client_class", [AdPartnerServiceClient]) +@pytest.mark.parametrize("client_class", [TaxonomyCategoryServiceClient]) @mock.patch.object( - AdPartnerServiceClient, + TaxonomyCategoryServiceClient, "DEFAULT_ENDPOINT", - modify_default_endpoint(AdPartnerServiceClient), + modify_default_endpoint(TaxonomyCategoryServiceClient), ) -def test_ad_partner_service_client_get_mtls_endpoint_and_cert_source(client_class): +def test_taxonomy_category_service_client_get_mtls_endpoint_and_cert_source( + client_class, +): mock_client_cert_source = mock.Mock() # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". @@ -827,21 +857,21 @@ def test_ad_partner_service_client_get_mtls_endpoint_and_cert_source(client_clas ) -@pytest.mark.parametrize("client_class", [AdPartnerServiceClient]) +@pytest.mark.parametrize("client_class", [TaxonomyCategoryServiceClient]) @mock.patch.object( - AdPartnerServiceClient, + TaxonomyCategoryServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(AdPartnerServiceClient), + modify_default_endpoint_template(TaxonomyCategoryServiceClient), ) -def test_ad_partner_service_client_client_api_endpoint(client_class): +def test_taxonomy_category_service_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback api_override = "foo.com" - default_universe = AdPartnerServiceClient._DEFAULT_UNIVERSE - default_endpoint = AdPartnerServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + default_universe = TaxonomyCategoryServiceClient._DEFAULT_UNIVERSE + default_endpoint = TaxonomyCategoryServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( UNIVERSE_DOMAIN=default_universe ) mock_universe = "bar.com" - mock_endpoint = AdPartnerServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + mock_endpoint = TaxonomyCategoryServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( 
UNIVERSE_DOMAIN=mock_universe ) @@ -909,10 +939,14 @@ def test_ad_partner_service_client_client_api_endpoint(client_class): @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (AdPartnerServiceClient, transports.AdPartnerServiceRestTransport, "rest"), + ( + TaxonomyCategoryServiceClient, + transports.TaxonomyCategoryServiceRestTransport, + "rest", + ), ], ) -def test_ad_partner_service_client_client_options_scopes( +def test_taxonomy_category_service_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. @@ -941,14 +975,14 @@ def test_ad_partner_service_client_client_options_scopes( "client_class,transport_class,transport_name,grpc_helpers", [ ( - AdPartnerServiceClient, - transports.AdPartnerServiceRestTransport, + TaxonomyCategoryServiceClient, + transports.TaxonomyCategoryServiceRestTransport, "rest", None, ), ], ) -def test_ad_partner_service_client_client_options_credentials_file( +def test_taxonomy_category_service_client_client_options_credentials_file( client_class, transport_class, transport_name, grpc_helpers ): # Check the case credentials file is provided. @@ -975,48 +1009,65 @@ def test_ad_partner_service_client_client_options_credentials_file( @pytest.mark.parametrize( "request_type", [ - ad_partner_service.GetAdPartnerRequest, + taxonomy_category_service.GetTaxonomyCategoryRequest, dict, ], ) -def test_get_ad_partner_rest(request_type): - client = AdPartnerServiceClient( +def test_get_taxonomy_category_rest(request_type): + client = TaxonomyCategoryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "networks/sample1/adPartners/sample2"} + request_init = {"name": "networks/sample1/taxonomyCategories/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = ad_partner_service.AdPartner( + return_value = taxonomy_category_messages.TaxonomyCategory( name="name_value", + taxonomy_category_id=2152, + display_name="display_name_value", + grouping_only=True, + parent_taxonomy_category_id=2897, + taxonomy_type=taxonomy_type_enum.TaxonomyTypeEnum.TaxonomyType.TAXONOMY_IAB_AUDIENCE_1_1, + ancestor_names=["ancestor_names_value"], + ancestor_taxonomy_category_ids=[3225], ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = ad_partner_service.AdPartner.pb(return_value) + return_value = taxonomy_category_messages.TaxonomyCategory.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_ad_partner(request) + response = client.get_taxonomy_category(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, ad_partner_service.AdPartner) + assert isinstance(response, taxonomy_category_messages.TaxonomyCategory) assert response.name == "name_value" + assert response.taxonomy_category_id == 2152 + assert response.display_name == "display_name_value" + assert response.grouping_only is True + assert response.parent_taxonomy_category_id == 2897 + assert ( + response.taxonomy_type + == taxonomy_type_enum.TaxonomyTypeEnum.TaxonomyType.TAXONOMY_IAB_AUDIENCE_1_1 + ) + assert response.ancestor_names == ["ancestor_names_value"] + assert response.ancestor_taxonomy_category_ids == [3225] -def test_get_ad_partner_rest_use_cached_wrapped_rpc(): +def test_get_taxonomy_category_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AdPartnerServiceClient( + client = TaxonomyCategoryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -1026,32 +1077,37 @@ def test_get_ad_partner_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_ad_partner in client._transport._wrapped_methods + assert ( + client._transport.get_taxonomy_category + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_ad_partner] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_taxonomy_category + ] = mock_rpc request = {} - client.get_ad_partner(request) + client.get_taxonomy_category(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_ad_partner(request) + client.get_taxonomy_category(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_ad_partner_rest_required_fields( - request_type=ad_partner_service.GetAdPartnerRequest, +def test_get_taxonomy_category_rest_required_fields( + request_type=taxonomy_category_service.GetTaxonomyCategoryRequest, ): - transport_class = transports.AdPartnerServiceRestTransport + transport_class = transports.TaxonomyCategoryServiceRestTransport request_init = {} request_init["name"] = "" @@ -1065,7 +1121,7 @@ def test_get_ad_partner_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_ad_partner._get_unset_required_fields(jsonified_request) + ).get_taxonomy_category._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -1074,21 +1130,21 @@ def test_get_ad_partner_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_ad_partner._get_unset_required_fields(jsonified_request) + ).get_taxonomy_category._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request assert jsonified_request["name"] == "name_value" - client = AdPartnerServiceClient( + client = TaxonomyCategoryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = ad_partner_service.AdPartner() + return_value = taxonomy_category_messages.TaxonomyCategory() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -1109,50 +1165,50 @@ def test_get_ad_partner_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = ad_partner_service.AdPartner.pb(return_value) + return_value = taxonomy_category_messages.TaxonomyCategory.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_ad_partner(request) + response = client.get_taxonomy_category(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_ad_partner_rest_unset_required_fields(): - transport = transports.AdPartnerServiceRestTransport( +def test_get_taxonomy_category_rest_unset_required_fields(): + transport = transports.TaxonomyCategoryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_ad_partner._get_unset_required_fields({}) + unset_fields = transport.get_taxonomy_category._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_ad_partner_rest_interceptors(null_interceptor): - transport = transports.AdPartnerServiceRestTransport( +def test_get_taxonomy_category_rest_interceptors(null_interceptor): + transport = transports.TaxonomyCategoryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor - else transports.AdPartnerServiceRestInterceptor(), + else transports.TaxonomyCategoryServiceRestInterceptor(), ) - client = AdPartnerServiceClient(transport=transport) + client = TaxonomyCategoryServiceClient(transport=transport) with mock.patch.object( type(client.transport._session), "request" ) as req, 
mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AdPartnerServiceRestInterceptor, "post_get_ad_partner" + transports.TaxonomyCategoryServiceRestInterceptor, "post_get_taxonomy_category" ) as post, mock.patch.object( - transports.AdPartnerServiceRestInterceptor, "pre_get_ad_partner" + transports.TaxonomyCategoryServiceRestInterceptor, "pre_get_taxonomy_category" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = ad_partner_service.GetAdPartnerRequest.pb( - ad_partner_service.GetAdPartnerRequest() + pb_message = taxonomy_category_service.GetTaxonomyCategoryRequest.pb( + taxonomy_category_service.GetTaxonomyCategoryRequest() ) transcode.return_value = { "method": "post", @@ -1164,19 +1220,19 @@ def test_get_ad_partner_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ad_partner_service.AdPartner.to_json( - ad_partner_service.AdPartner() + req.return_value._content = taxonomy_category_messages.TaxonomyCategory.to_json( + taxonomy_category_messages.TaxonomyCategory() ) - request = ad_partner_service.GetAdPartnerRequest() + request = taxonomy_category_service.GetTaxonomyCategoryRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = ad_partner_service.AdPartner() + post.return_value = taxonomy_category_messages.TaxonomyCategory() - client.get_ad_partner( + client.get_taxonomy_category( request, metadata=[ ("key", "val"), @@ -1188,16 +1244,17 @@ def test_get_ad_partner_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_ad_partner_rest_bad_request( - transport: str = "rest", request_type=ad_partner_service.GetAdPartnerRequest +def test_get_taxonomy_category_rest_bad_request( + transport: str = "rest", + request_type=taxonomy_category_service.GetTaxonomyCategoryRequest, ): - client = 
AdPartnerServiceClient( + client = TaxonomyCategoryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {"name": "networks/sample1/adPartners/sample2"} + request_init = {"name": "networks/sample1/taxonomyCategories/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1209,11 +1266,11 @@ def test_get_ad_partner_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_ad_partner(request) + client.get_taxonomy_category(request) -def test_get_ad_partner_rest_flattened(): - client = AdPartnerServiceClient( +def test_get_taxonomy_category_rest_flattened(): + client = TaxonomyCategoryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -1221,10 +1278,10 @@ def test_get_ad_partner_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = ad_partner_service.AdPartner() + return_value = taxonomy_category_messages.TaxonomyCategory() # get arguments that satisfy an http rule for this method - sample_request = {"name": "networks/sample1/adPartners/sample2"} + sample_request = {"name": "networks/sample1/taxonomyCategories/sample2"} # get truthy value for each flattened field mock_args = dict( @@ -1236,24 +1293,25 @@ def test_get_ad_partner_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = ad_partner_service.AdPartner.pb(return_value) + return_value = taxonomy_category_messages.TaxonomyCategory.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_ad_partner(**mock_args) + client.get_taxonomy_category(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=networks/*/adPartners/*}" % client.transport._host, args[1] + "%s/v1/{name=networks/*/taxonomyCategories/*}" % client.transport._host, + args[1], ) -def test_get_ad_partner_rest_flattened_error(transport: str = "rest"): - client = AdPartnerServiceClient( +def test_get_taxonomy_category_rest_flattened_error(transport: str = "rest"): + client = TaxonomyCategoryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1261,14 +1319,14 @@ def test_get_ad_partner_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_ad_partner( - ad_partner_service.GetAdPartnerRequest(), + client.get_taxonomy_category( + taxonomy_category_service.GetTaxonomyCategoryRequest(), name="name_value", ) -def test_get_ad_partner_rest_error(): - client = AdPartnerServiceClient( +def test_get_taxonomy_category_rest_error(): + client = TaxonomyCategoryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -1276,12 +1334,12 @@ def test_get_ad_partner_rest_error(): @pytest.mark.parametrize( "request_type", [ - ad_partner_service.ListAdPartnersRequest, + taxonomy_category_service.ListTaxonomyCategoriesRequest, dict, ], ) -def test_list_ad_partners_rest(request_type): - client = AdPartnerServiceClient( +def test_list_taxonomy_categories_rest(request_type): + client = TaxonomyCategoryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -1293,7 +1351,7 @@ def test_list_ad_partners_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = ad_partner_service.ListAdPartnersResponse( + return_value = taxonomy_category_service.ListTaxonomyCategoriesResponse( next_page_token="next_page_token_value", total_size=1086, ) @@ -1302,24 +1360,26 @@ def test_list_ad_partners_rest(request_type): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = ad_partner_service.ListAdPartnersResponse.pb(return_value) + return_value = taxonomy_category_service.ListTaxonomyCategoriesResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_ad_partners(request) + response = client.list_taxonomy_categories(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAdPartnersPager) + assert isinstance(response, pagers.ListTaxonomyCategoriesPager) assert response.next_page_token == "next_page_token_value" assert response.total_size == 1086 -def test_list_ad_partners_rest_use_cached_wrapped_rpc(): +def test_list_taxonomy_categories_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AdPartnerServiceClient( + client = TaxonomyCategoryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -1329,7 +1389,10 @@ def test_list_ad_partners_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_ad_partners in client._transport._wrapped_methods + assert ( + client._transport.list_taxonomy_categories + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -1337,26 +1400,26 @@ def 
test_list_ad_partners_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_ad_partners + client._transport.list_taxonomy_categories ] = mock_rpc request = {} - client.list_ad_partners(request) + client.list_taxonomy_categories(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_ad_partners(request) + client.list_taxonomy_categories(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_ad_partners_rest_required_fields( - request_type=ad_partner_service.ListAdPartnersRequest, +def test_list_taxonomy_categories_rest_required_fields( + request_type=taxonomy_category_service.ListTaxonomyCategoriesRequest, ): - transport_class = transports.AdPartnerServiceRestTransport + transport_class = transports.TaxonomyCategoryServiceRestTransport request_init = {} request_init["parent"] = "" @@ -1370,7 +1433,7 @@ def test_list_ad_partners_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_ad_partners._get_unset_required_fields(jsonified_request) + ).list_taxonomy_categories._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -1379,7 +1442,7 @@ def test_list_ad_partners_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_ad_partners._get_unset_required_fields(jsonified_request) + ).list_taxonomy_categories._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( @@ -1396,14 +1459,14 @@ def test_list_ad_partners_rest_required_fields( assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" - client = AdPartnerServiceClient( + client = TaxonomyCategoryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = ad_partner_service.ListAdPartnersResponse() + return_value = taxonomy_category_service.ListTaxonomyCategoriesResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -1424,25 +1487,27 @@ def test_list_ad_partners_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = ad_partner_service.ListAdPartnersResponse.pb(return_value) + return_value = taxonomy_category_service.ListTaxonomyCategoriesResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_ad_partners(request) + response = client.list_taxonomy_categories(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_ad_partners_rest_unset_required_fields(): - transport = transports.AdPartnerServiceRestTransport( +def test_list_taxonomy_categories_rest_unset_required_fields(): + transport = transports.TaxonomyCategoryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_ad_partners._get_unset_required_fields({}) + unset_fields = transport.list_taxonomy_categories._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( @@ -1458,27 +1523,29 @@ def 
test_list_ad_partners_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_ad_partners_rest_interceptors(null_interceptor): - transport = transports.AdPartnerServiceRestTransport( +def test_list_taxonomy_categories_rest_interceptors(null_interceptor): + transport = transports.TaxonomyCategoryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor - else transports.AdPartnerServiceRestInterceptor(), + else transports.TaxonomyCategoryServiceRestInterceptor(), ) - client = AdPartnerServiceClient(transport=transport) + client = TaxonomyCategoryServiceClient(transport=transport) with mock.patch.object( type(client.transport._session), "request" ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AdPartnerServiceRestInterceptor, "post_list_ad_partners" + transports.TaxonomyCategoryServiceRestInterceptor, + "post_list_taxonomy_categories", ) as post, mock.patch.object( - transports.AdPartnerServiceRestInterceptor, "pre_list_ad_partners" + transports.TaxonomyCategoryServiceRestInterceptor, + "pre_list_taxonomy_categories", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = ad_partner_service.ListAdPartnersRequest.pb( - ad_partner_service.ListAdPartnersRequest() + pb_message = taxonomy_category_service.ListTaxonomyCategoriesRequest.pb( + taxonomy_category_service.ListTaxonomyCategoriesRequest() ) transcode.return_value = { "method": "post", @@ -1490,19 +1557,21 @@ def test_list_ad_partners_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ad_partner_service.ListAdPartnersResponse.to_json( - ad_partner_service.ListAdPartnersResponse() + req.return_value._content = ( + taxonomy_category_service.ListTaxonomyCategoriesResponse.to_json( + 
taxonomy_category_service.ListTaxonomyCategoriesResponse() + ) ) - request = ad_partner_service.ListAdPartnersRequest() + request = taxonomy_category_service.ListTaxonomyCategoriesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = ad_partner_service.ListAdPartnersResponse() + post.return_value = taxonomy_category_service.ListTaxonomyCategoriesResponse() - client.list_ad_partners( + client.list_taxonomy_categories( request, metadata=[ ("key", "val"), @@ -1514,10 +1583,11 @@ def test_list_ad_partners_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_ad_partners_rest_bad_request( - transport: str = "rest", request_type=ad_partner_service.ListAdPartnersRequest +def test_list_taxonomy_categories_rest_bad_request( + transport: str = "rest", + request_type=taxonomy_category_service.ListTaxonomyCategoriesRequest, ): - client = AdPartnerServiceClient( + client = TaxonomyCategoryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1535,11 +1605,11 @@ def test_list_ad_partners_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_ad_partners(request) + client.list_taxonomy_categories(request) -def test_list_ad_partners_rest_flattened(): - client = AdPartnerServiceClient( +def test_list_taxonomy_categories_rest_flattened(): + client = TaxonomyCategoryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -1547,7 +1617,7 @@ def test_list_ad_partners_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = ad_partner_service.ListAdPartnersResponse() + return_value = taxonomy_category_service.ListTaxonomyCategoriesResponse() # get arguments that satisfy an http rule for this method sample_request = {"parent": "networks/sample1"} @@ -1562,24 +1632,27 @@ def test_list_ad_partners_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = ad_partner_service.ListAdPartnersResponse.pb(return_value) + return_value = taxonomy_category_service.ListTaxonomyCategoriesResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_ad_partners(**mock_args) + client.list_taxonomy_categories(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=networks/*}/adPartners" % client.transport._host, args[1] + "%s/v1/{parent=networks/*}/taxonomyCategories" % client.transport._host, + args[1], ) -def test_list_ad_partners_rest_flattened_error(transport: str = "rest"): - client = AdPartnerServiceClient( +def test_list_taxonomy_categories_rest_flattened_error(transport: str = "rest"): + client = TaxonomyCategoryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1587,14 +1660,14 @@ def test_list_ad_partners_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_ad_partners( - ad_partner_service.ListAdPartnersRequest(), + client.list_taxonomy_categories( + taxonomy_category_service.ListTaxonomyCategoriesRequest(), parent="parent_value", ) -def test_list_ad_partners_rest_pager(transport: str = "rest"): - client = AdPartnerServiceClient( +def test_list_taxonomy_categories_rest_pager(transport: str = "rest"): + client = TaxonomyCategoryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1605,28 +1678,28 @@ def test_list_ad_partners_rest_pager(transport: str = "rest"): # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - ad_partner_service.ListAdPartnersResponse( - ad_partners=[ - ad_partner_service.AdPartner(), - ad_partner_service.AdPartner(), - ad_partner_service.AdPartner(), + taxonomy_category_service.ListTaxonomyCategoriesResponse( + taxonomy_categories=[ + taxonomy_category_messages.TaxonomyCategory(), + taxonomy_category_messages.TaxonomyCategory(), + taxonomy_category_messages.TaxonomyCategory(), ], next_page_token="abc", ), - ad_partner_service.ListAdPartnersResponse( - ad_partners=[], + taxonomy_category_service.ListTaxonomyCategoriesResponse( + taxonomy_categories=[], next_page_token="def", ), - ad_partner_service.ListAdPartnersResponse( - ad_partners=[ - ad_partner_service.AdPartner(), + taxonomy_category_service.ListTaxonomyCategoriesResponse( + taxonomy_categories=[ + taxonomy_category_messages.TaxonomyCategory(), ], next_page_token="ghi", ), - ad_partner_service.ListAdPartnersResponse( - ad_partners=[ - ad_partner_service.AdPartner(), - ad_partner_service.AdPartner(), + taxonomy_category_service.ListTaxonomyCategoriesResponse( + taxonomy_categories=[ + taxonomy_category_messages.TaxonomyCategory(), + taxonomy_category_messages.TaxonomyCategory(), ], ), ) @@ -1635,7 +1708,8 @@ def test_list_ad_partners_rest_pager(transport: str = "rest"): # Wrap the values 
into proper Response objs response = tuple( - ad_partner_service.ListAdPartnersResponse.to_json(x) for x in response + taxonomy_category_service.ListTaxonomyCategoriesResponse.to_json(x) + for x in response ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): @@ -1645,46 +1719,48 @@ def test_list_ad_partners_rest_pager(transport: str = "rest"): sample_request = {"parent": "networks/sample1"} - pager = client.list_ad_partners(request=sample_request) + pager = client.list_taxonomy_categories(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, ad_partner_service.AdPartner) for i in results) + assert all( + isinstance(i, taxonomy_category_messages.TaxonomyCategory) for i in results + ) - pages = list(client.list_ad_partners(request=sample_request).pages) + pages = list(client.list_taxonomy_categories(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. - transport = transports.AdPartnerServiceRestTransport( + transport = transports.TaxonomyCategoryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): - client = AdPartnerServiceClient( + client = TaxonomyCategoryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. 
- transport = transports.AdPartnerServiceRestTransport( + transport = transports.TaxonomyCategoryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): - client = AdPartnerServiceClient( + client = TaxonomyCategoryServiceClient( client_options={"credentials_file": "credentials.json"}, transport=transport, ) # It is an error to provide an api_key and a transport instance. - transport = transports.AdPartnerServiceRestTransport( + transport = transports.TaxonomyCategoryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), ) options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): - client = AdPartnerServiceClient( + client = TaxonomyCategoryServiceClient( client_options=options, transport=transport, ) @@ -1693,16 +1769,16 @@ def test_credentials_transport_error(): options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): - client = AdPartnerServiceClient( + client = TaxonomyCategoryServiceClient( client_options=options, credentials=ga_credentials.AnonymousCredentials() ) # It is an error to provide scopes and a transport instance. - transport = transports.AdPartnerServiceRestTransport( + transport = transports.TaxonomyCategoryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): - client = AdPartnerServiceClient( + client = TaxonomyCategoryServiceClient( client_options={"scopes": ["1", "2"]}, transport=transport, ) @@ -1710,17 +1786,17 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. 
- transport = transports.AdPartnerServiceRestTransport( + transport = transports.TaxonomyCategoryServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), ) - client = AdPartnerServiceClient(transport=transport) + client = TaxonomyCategoryServiceClient(transport=transport) assert client.transport is transport @pytest.mark.parametrize( "transport_class", [ - transports.AdPartnerServiceRestTransport, + transports.TaxonomyCategoryServiceRestTransport, ], ) def test_transport_adc(transport_class): @@ -1738,36 +1814,36 @@ def test_transport_adc(transport_class): ], ) def test_transport_kind(transport_name): - transport = AdPartnerServiceClient.get_transport_class(transport_name)( + transport = TaxonomyCategoryServiceClient.get_transport_class(transport_name)( credentials=ga_credentials.AnonymousCredentials(), ) assert transport.kind == transport_name -def test_ad_partner_service_base_transport_error(): +def test_taxonomy_category_service_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.AdPartnerServiceTransport( + transport = transports.TaxonomyCategoryServiceTransport( credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) -def test_ad_partner_service_base_transport(): +def test_taxonomy_category_service_base_transport(): # Instantiate the base transport. with mock.patch( - "google.ads.admanager_v1.services.ad_partner_service.transports.AdPartnerServiceTransport.__init__" + "google.ads.admanager_v1.services.taxonomy_category_service.transports.TaxonomyCategoryServiceTransport.__init__" ) as Transport: Transport.return_value = None - transport = transports.AdPartnerServiceTransport( + transport = transports.TaxonomyCategoryServiceTransport( credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly # raise NotImplementedError. 
methods = ( - "get_ad_partner", - "list_ad_partners", + "get_taxonomy_category", + "list_taxonomy_categories", "get_operation", ) for method in methods: @@ -1786,16 +1862,16 @@ def test_ad_partner_service_base_transport(): getattr(transport, r)() -def test_ad_partner_service_base_transport_with_credentials_file(): +def test_taxonomy_category_service_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( - "google.ads.admanager_v1.services.ad_partner_service.transports.AdPartnerServiceTransport._prep_wrapped_messages" + "google.ads.admanager_v1.services.taxonomy_category_service.transports.TaxonomyCategoryServiceTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.AdPartnerServiceTransport( + transport = transports.TaxonomyCategoryServiceTransport( credentials_file="credentials.json", quota_project_id="octopus", ) @@ -1807,22 +1883,22 @@ def test_ad_partner_service_base_transport_with_credentials_file(): ) -def test_ad_partner_service_base_transport_with_adc(): +def test_taxonomy_category_service_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.ads.admanager_v1.services.ad_partner_service.transports.AdPartnerServiceTransport._prep_wrapped_messages" + "google.ads.admanager_v1.services.taxonomy_category_service.transports.TaxonomyCategoryServiceTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.AdPartnerServiceTransport() + transport = transports.TaxonomyCategoryServiceTransport() adc.assert_called_once() -def test_ad_partner_service_auth_adc(): +def test_taxonomy_category_service_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) - AdPartnerServiceClient() + TaxonomyCategoryServiceClient() adc.assert_called_once_with( scopes=None, default_scopes=(), @@ -1830,12 +1906,12 @@ def test_ad_partner_service_auth_adc(): ) -def test_ad_partner_service_http_transport_client_cert_source_for_mtls(): +def test_taxonomy_category_service_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" ) as mock_configure_mtls_channel: - transports.AdPartnerServiceRestTransport( + transports.TaxonomyCategoryServiceRestTransport( credentials=cred, client_cert_source_for_mtls=client_cert_source_callback ) mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) @@ -1847,8 +1923,8 @@ def test_ad_partner_service_http_transport_client_cert_source_for_mtls(): "rest", ], ) -def test_ad_partner_service_host_no_port(transport_name): - client = AdPartnerServiceClient( +def test_taxonomy_category_service_host_no_port(transport_name): + client = TaxonomyCategoryServiceClient( credentials=ga_credentials.AnonymousCredentials(), 
client_options=client_options.ClientOptions( api_endpoint="admanager.googleapis.com" @@ -1868,8 +1944,8 @@ def test_ad_partner_service_host_no_port(transport_name): "rest", ], ) -def test_ad_partner_service_host_with_port(transport_name): - client = AdPartnerServiceClient( +def test_taxonomy_category_service_host_with_port(transport_name): + client = TaxonomyCategoryServiceClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="admanager.googleapis.com:8000" @@ -1889,65 +1965,67 @@ def test_ad_partner_service_host_with_port(transport_name): "rest", ], ) -def test_ad_partner_service_client_transport_session_collision(transport_name): +def test_taxonomy_category_service_client_transport_session_collision(transport_name): creds1 = ga_credentials.AnonymousCredentials() creds2 = ga_credentials.AnonymousCredentials() - client1 = AdPartnerServiceClient( + client1 = TaxonomyCategoryServiceClient( credentials=creds1, transport=transport_name, ) - client2 = AdPartnerServiceClient( + client2 = TaxonomyCategoryServiceClient( credentials=creds2, transport=transport_name, ) - session1 = client1.transport.get_ad_partner._session - session2 = client2.transport.get_ad_partner._session + session1 = client1.transport.get_taxonomy_category._session + session2 = client2.transport.get_taxonomy_category._session assert session1 != session2 - session1 = client1.transport.list_ad_partners._session - session2 = client2.transport.list_ad_partners._session + session1 = client1.transport.list_taxonomy_categories._session + session2 = client2.transport.list_taxonomy_categories._session assert session1 != session2 -def test_ad_partner_path(): +def test_network_path(): network_code = "squid" - ad_partner = "clam" - expected = "networks/{network_code}/adPartners/{ad_partner}".format( + expected = "networks/{network_code}".format( network_code=network_code, - ad_partner=ad_partner, ) - actual = 
AdPartnerServiceClient.ad_partner_path(network_code, ad_partner) + actual = TaxonomyCategoryServiceClient.network_path(network_code) assert expected == actual -def test_parse_ad_partner_path(): +def test_parse_network_path(): expected = { - "network_code": "whelk", - "ad_partner": "octopus", + "network_code": "clam", } - path = AdPartnerServiceClient.ad_partner_path(**expected) + path = TaxonomyCategoryServiceClient.network_path(**expected) # Check that the path construction is reversible. - actual = AdPartnerServiceClient.parse_ad_partner_path(path) + actual = TaxonomyCategoryServiceClient.parse_network_path(path) assert expected == actual -def test_network_path(): - network_code = "oyster" - expected = "networks/{network_code}".format( +def test_taxonomy_category_path(): + network_code = "whelk" + taxonomy_category = "octopus" + expected = "networks/{network_code}/taxonomyCategories/{taxonomy_category}".format( network_code=network_code, + taxonomy_category=taxonomy_category, + ) + actual = TaxonomyCategoryServiceClient.taxonomy_category_path( + network_code, taxonomy_category ) - actual = AdPartnerServiceClient.network_path(network_code) assert expected == actual -def test_parse_network_path(): +def test_parse_taxonomy_category_path(): expected = { - "network_code": "nudibranch", + "network_code": "oyster", + "taxonomy_category": "nudibranch", } - path = AdPartnerServiceClient.network_path(**expected) + path = TaxonomyCategoryServiceClient.taxonomy_category_path(**expected) # Check that the path construction is reversible. 
- actual = AdPartnerServiceClient.parse_network_path(path) + actual = TaxonomyCategoryServiceClient.parse_taxonomy_category_path(path) assert expected == actual @@ -1956,7 +2034,7 @@ def test_common_billing_account_path(): expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) - actual = AdPartnerServiceClient.common_billing_account_path(billing_account) + actual = TaxonomyCategoryServiceClient.common_billing_account_path(billing_account) assert expected == actual @@ -1964,10 +2042,10 @@ def test_parse_common_billing_account_path(): expected = { "billing_account": "mussel", } - path = AdPartnerServiceClient.common_billing_account_path(**expected) + path = TaxonomyCategoryServiceClient.common_billing_account_path(**expected) # Check that the path construction is reversible. - actual = AdPartnerServiceClient.parse_common_billing_account_path(path) + actual = TaxonomyCategoryServiceClient.parse_common_billing_account_path(path) assert expected == actual @@ -1976,7 +2054,7 @@ def test_common_folder_path(): expected = "folders/{folder}".format( folder=folder, ) - actual = AdPartnerServiceClient.common_folder_path(folder) + actual = TaxonomyCategoryServiceClient.common_folder_path(folder) assert expected == actual @@ -1984,10 +2062,10 @@ def test_parse_common_folder_path(): expected = { "folder": "nautilus", } - path = AdPartnerServiceClient.common_folder_path(**expected) + path = TaxonomyCategoryServiceClient.common_folder_path(**expected) # Check that the path construction is reversible. 
- actual = AdPartnerServiceClient.parse_common_folder_path(path) + actual = TaxonomyCategoryServiceClient.parse_common_folder_path(path) assert expected == actual @@ -1996,7 +2074,7 @@ def test_common_organization_path(): expected = "organizations/{organization}".format( organization=organization, ) - actual = AdPartnerServiceClient.common_organization_path(organization) + actual = TaxonomyCategoryServiceClient.common_organization_path(organization) assert expected == actual @@ -2004,10 +2082,10 @@ def test_parse_common_organization_path(): expected = { "organization": "abalone", } - path = AdPartnerServiceClient.common_organization_path(**expected) + path = TaxonomyCategoryServiceClient.common_organization_path(**expected) # Check that the path construction is reversible. - actual = AdPartnerServiceClient.parse_common_organization_path(path) + actual = TaxonomyCategoryServiceClient.parse_common_organization_path(path) assert expected == actual @@ -2016,7 +2094,7 @@ def test_common_project_path(): expected = "projects/{project}".format( project=project, ) - actual = AdPartnerServiceClient.common_project_path(project) + actual = TaxonomyCategoryServiceClient.common_project_path(project) assert expected == actual @@ -2024,10 +2102,10 @@ def test_parse_common_project_path(): expected = { "project": "clam", } - path = AdPartnerServiceClient.common_project_path(**expected) + path = TaxonomyCategoryServiceClient.common_project_path(**expected) # Check that the path construction is reversible. 
- actual = AdPartnerServiceClient.parse_common_project_path(path) + actual = TaxonomyCategoryServiceClient.parse_common_project_path(path) assert expected == actual @@ -2038,7 +2116,7 @@ def test_common_location_path(): project=project, location=location, ) - actual = AdPartnerServiceClient.common_location_path(project, location) + actual = TaxonomyCategoryServiceClient.common_location_path(project, location) assert expected == actual @@ -2047,10 +2125,10 @@ def test_parse_common_location_path(): "project": "oyster", "location": "nudibranch", } - path = AdPartnerServiceClient.common_location_path(**expected) + path = TaxonomyCategoryServiceClient.common_location_path(**expected) # Check that the path construction is reversible. - actual = AdPartnerServiceClient.parse_common_location_path(path) + actual = TaxonomyCategoryServiceClient.parse_common_location_path(path) assert expected == actual @@ -2058,18 +2136,18 @@ def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() with mock.patch.object( - transports.AdPartnerServiceTransport, "_prep_wrapped_messages" + transports.TaxonomyCategoryServiceTransport, "_prep_wrapped_messages" ) as prep: - client = AdPartnerServiceClient( + client = TaxonomyCategoryServiceClient( credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) with mock.patch.object( - transports.AdPartnerServiceTransport, "_prep_wrapped_messages" + transports.TaxonomyCategoryServiceTransport, "_prep_wrapped_messages" ) as prep: - transport_class = AdPartnerServiceClient.get_transport_class() + transport_class = TaxonomyCategoryServiceClient.get_transport_class() transport = transport_class( credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, @@ -2080,14 +2158,14 @@ def test_client_with_default_client_info(): def test_get_operation_rest_bad_request( transport: str = "rest", request_type=operations_pb2.GetOperationRequest ): - client = 
AdPartnerServiceClient( + client = TaxonomyCategoryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) request = request_type() request = json_format.ParseDict( - {"name": "networks/sample1/operations/reports/exports/sample2"}, request + {"name": "networks/sample1/operations/reports/runs/sample2"}, request ) # Mock the http request call within the method and fake a BadRequest error. @@ -2110,11 +2188,11 @@ def test_get_operation_rest_bad_request( ], ) def test_get_operation_rest(request_type): - client = AdPartnerServiceClient( + client = TaxonomyCategoryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {"name": "networks/sample1/operations/reports/exports/sample2"} + request_init = {"name": "networks/sample1/operations/reports/runs/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: @@ -2141,7 +2219,7 @@ def test_transport_close(): } for transport, close_name in transports.items(): - client = AdPartnerServiceClient( + client = TaxonomyCategoryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport ) with mock.patch.object( @@ -2157,7 +2235,7 @@ def test_client_ctx(): "rest", ] for transport in transports: - client = AdPartnerServiceClient( + client = TaxonomyCategoryServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport ) # Test client calls underlying transport. 
@@ -2171,7 +2249,10 @@ def test_client_ctx(): @pytest.mark.parametrize( "client_class,transport_class", [ - (AdPartnerServiceClient, transports.AdPartnerServiceRestTransport), + ( + TaxonomyCategoryServiceClient, + transports.TaxonomyCategoryServiceRestTransport, + ), ], ) def test_api_key_credentials(client_class, transport_class): diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_team_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_team_service.py deleted file mode 100644 index 44ad7f4a8595..000000000000 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_team_service.py +++ /dev/null @@ -1,2145 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os - -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -from collections.abc import Iterable -import json -import math - -from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template -from google.api_core import api_core_version, client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -import google.auth -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import json_format -import grpc -from grpc.experimental import aio -from proto.marshal.rules import wrappers -from proto.marshal.rules.dates import DurationRule, TimestampRule -import pytest -from requests import PreparedRequest, Request, Response -from requests.sessions import Session - -from google.ads.admanager_v1.services.team_service import ( - TeamServiceClient, - pagers, - transports, -) -from google.ads.admanager_v1.types import team_service - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return ( - "foo.googleapis.com" - if ("localhost" in client.DEFAULT_ENDPOINT) - else client.DEFAULT_ENDPOINT - ) - - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
-def modify_default_endpoint_template(client): - return ( - "test.{UNIVERSE_DOMAIN}" - if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) - else client._DEFAULT_ENDPOINT_TEMPLATE - ) - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert TeamServiceClient._get_default_mtls_endpoint(None) is None - assert ( - TeamServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - ) - assert ( - TeamServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) - == api_mtls_endpoint - ) - assert ( - TeamServiceClient._get_default_mtls_endpoint(sandbox_endpoint) - == sandbox_mtls_endpoint - ) - assert ( - TeamServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) - == sandbox_mtls_endpoint - ) - assert TeamServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -def test__read_environment_variables(): - assert TeamServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert TeamServiceClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert TeamServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} - ): - with pytest.raises(ValueError) as excinfo: - TeamServiceClient._read_environment_variables() - assert ( - str(excinfo.value) - == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert TeamServiceClient._read_environment_variables() == (False, "never", 
None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert TeamServiceClient._read_environment_variables() == ( - False, - "always", - None, - ) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert TeamServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - TeamServiceClient._read_environment_variables() - assert ( - str(excinfo.value) - == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - ) - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert TeamServiceClient._read_environment_variables() == ( - False, - "auto", - "foo.com", - ) - - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert TeamServiceClient._get_client_cert_source(None, False) is None - assert ( - TeamServiceClient._get_client_cert_source(mock_provided_cert_source, False) - is None - ) - assert ( - TeamServiceClient._get_client_cert_source(mock_provided_cert_source, True) - == mock_provided_cert_source - ) - - with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", return_value=True - ): - with mock.patch( - "google.auth.transport.mtls.default_client_cert_source", - return_value=mock_default_cert_source, - ): - assert ( - TeamServiceClient._get_client_cert_source(None, True) - is mock_default_cert_source - ) - assert ( - TeamServiceClient._get_client_cert_source( - mock_provided_cert_source, "true" - ) - is mock_provided_cert_source - ) - - -@mock.patch.object( - TeamServiceClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(TeamServiceClient), -) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = 
TeamServiceClient._DEFAULT_UNIVERSE - default_endpoint = TeamServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=default_universe - ) - mock_universe = "bar.com" - mock_endpoint = TeamServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=mock_universe - ) - - assert ( - TeamServiceClient._get_api_endpoint( - api_override, mock_client_cert_source, default_universe, "always" - ) - == api_override - ) - assert ( - TeamServiceClient._get_api_endpoint( - None, mock_client_cert_source, default_universe, "auto" - ) - == TeamServiceClient.DEFAULT_MTLS_ENDPOINT - ) - assert ( - TeamServiceClient._get_api_endpoint(None, None, default_universe, "auto") - == default_endpoint - ) - assert ( - TeamServiceClient._get_api_endpoint(None, None, default_universe, "always") - == TeamServiceClient.DEFAULT_MTLS_ENDPOINT - ) - assert ( - TeamServiceClient._get_api_endpoint( - None, mock_client_cert_source, default_universe, "always" - ) - == TeamServiceClient.DEFAULT_MTLS_ENDPOINT - ) - assert ( - TeamServiceClient._get_api_endpoint(None, None, mock_universe, "never") - == mock_endpoint - ) - assert ( - TeamServiceClient._get_api_endpoint(None, None, default_universe, "never") - == default_endpoint - ) - - with pytest.raises(MutualTLSChannelError) as excinfo: - TeamServiceClient._get_api_endpoint( - None, mock_client_cert_source, mock_universe, "auto" - ) - assert ( - str(excinfo.value) - == "mTLS is not supported in any universe other than googleapis.com." 
- ) - - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert ( - TeamServiceClient._get_universe_domain( - client_universe_domain, universe_domain_env - ) - == client_universe_domain - ) - assert ( - TeamServiceClient._get_universe_domain(None, universe_domain_env) - == universe_domain_env - ) - assert ( - TeamServiceClient._get_universe_domain(None, None) - == TeamServiceClient._DEFAULT_UNIVERSE - ) - - with pytest.raises(ValueError) as excinfo: - TeamServiceClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (TeamServiceClient, transports.TeamServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - -@pytest.mark.parametrize( - "client_class,transport_name", - [ - (TeamServiceClient, "rest"), - ], -) -def test_team_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object( - service_account.Credentials, "from_service_account_info" - ) as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - "admanager.googleapis.com:443" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://admanager.googleapis.com" - ) - - -@pytest.mark.parametrize( - "transport_class,transport_name", - [ - (transports.TeamServiceRestTransport, "rest"), - ], -) -def test_team_service_client_service_account_always_use_jwt( - transport_class, transport_name -): - with mock.patch.object( - service_account.Credentials, "with_always_use_jwt_access", create=True - ) as use_jwt: - creds = service_account.Credentials(None, 
None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object( - service_account.Credentials, "with_always_use_jwt_access", create=True - ) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize( - "client_class,transport_name", - [ - (TeamServiceClient, "rest"), - ], -) -def test_team_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object( - service_account.Credentials, "from_service_account_file" - ) as factory: - factory.return_value = creds - client = client_class.from_service_account_file( - "dummy/file/path.json", transport=transport_name - ) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json( - "dummy/file/path.json", transport=transport_name - ) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - "admanager.googleapis.com:443" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://admanager.googleapis.com" - ) - - -def test_team_service_client_get_transport_class(): - transport = TeamServiceClient.get_transport_class() - available_transports = [ - transports.TeamServiceRestTransport, - ] - assert transport in available_transports - - transport = TeamServiceClient.get_transport_class("rest") - assert transport == transports.TeamServiceRestTransport - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (TeamServiceClient, transports.TeamServiceRestTransport, "rest"), - ], -) -@mock.patch.object( - TeamServiceClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(TeamServiceClient), -) -def 
test_team_service_client_client_options( - client_class, transport_class, transport_name -): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(TeamServiceClient, "get_transport_class") as gtc: - transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(TeamServiceClient, "get_transport_class") as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert ( - str(excinfo.value) - == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - ) - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} - ): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert ( - str(excinfo.value) - == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions( - api_audience="https://language.googleapis.com" - ) - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com", - ) - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name,use_client_cert_env", - [ - (TeamServiceClient, transports.TeamServiceRestTransport, "rest", "true"), - (TeamServiceClient, transports.TeamServiceRestTransport, "rest", "false"), - ], -) -@mock.patch.object( - TeamServiceClient, - 
"_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(TeamServiceClient), -) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_team_service_client_mtls_env_auto( - client_class, transport_class, transport_name, use_client_cert_env -): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - options = client_options.ClientOptions( - client_cert_source=client_cert_source_callback - ) - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=True, - ): - with mock.patch( - "google.auth.transport.mtls.default_client_cert_source", - return_value=client_cert_source_callback, - ): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=False, - ): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [TeamServiceClient]) -@mock.patch.object( - TeamServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TeamServiceClient) -) -def test_team_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint - ) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( - options - ) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint - ) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( - options - ) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=False, - ): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=True, - ): - with mock.patch( - "google.auth.transport.mtls.default_client_cert_source", - return_value=mock_client_cert_source, - ): - ( - api_endpoint, - cert_source, - ) = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert ( - str(excinfo.value) - == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - ) - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} - ): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert ( - str(excinfo.value) - == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - - -@pytest.mark.parametrize("client_class", [TeamServiceClient]) -@mock.patch.object( - TeamServiceClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(TeamServiceClient), -) -def test_team_service_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = TeamServiceClient._DEFAULT_UNIVERSE - default_endpoint = TeamServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=default_universe - ) - mock_universe = "bar.com" - mock_endpoint = TeamServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=mock_universe - ) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch( - "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" - ): - options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, api_endpoint=api_override - ) - client = client_class( - client_options=options, - credentials=ga_credentials.AnonymousCredentials(), - ) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - else: - client = client_class( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - assert client.api_endpoint == ( - mock_endpoint if universe_exists else default_endpoint - ) - assert client.universe_domain == ( - mock_universe if universe_exists else default_universe - ) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
- options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (TeamServiceClient, transports.TeamServiceRestTransport, "rest"), - ], -) -def test_team_service_client_client_options_scopes( - client_class, transport_class, transport_name -): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name,grpc_helpers", - [ - (TeamServiceClient, transports.TeamServiceRestTransport, "rest", None), - ], -) -def test_team_service_client_client_options_credentials_file( - client_class, transport_class, transport_name, grpc_helpers -): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions(credentials_file="credentials.json") - - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize( - "request_type", - [ - team_service.GetTeamRequest, - dict, - ], -) -def test_get_team_rest(request_type): - client = TeamServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "networks/sample1/teams/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = team_service.Team( - name="name_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = team_service.Team.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_team(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, team_service.Team) - assert response.name == "name_value" - - -def test_get_team_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = TeamServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_team in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.get_team] = mock_rpc - - request = {} - client.get_team(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_team(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_team_rest_required_fields(request_type=team_service.GetTeamRequest): - transport_class = transports.TeamServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_team._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_team._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = TeamServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = team_service.Team() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = team_service.Team.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_team(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_team_rest_unset_required_fields(): - transport = transports.TeamServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_team._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_team_rest_interceptors(null_interceptor): - transport = transports.TeamServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TeamServiceRestInterceptor(), - ) - client = TeamServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.TeamServiceRestInterceptor, "post_get_team" - ) as post, mock.patch.object( - transports.TeamServiceRestInterceptor, "pre_get_team" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = team_service.GetTeamRequest.pb(team_service.GetTeamRequest()) - 
transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = team_service.Team.to_json(team_service.Team()) - - request = team_service.GetTeamRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = team_service.Team() - - client.get_team( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_team_rest_bad_request( - transport: str = "rest", request_type=team_service.GetTeamRequest -): - client = TeamServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "networks/sample1/teams/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_team(request) - - -def test_get_team_rest_flattened(): - client = TeamServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = team_service.Team() - - # get arguments that satisfy an http rule for this method - sample_request = {"name": "networks/sample1/teams/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = team_service.Team.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_team(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=networks/*/teams/*}" % client.transport._host, args[1] - ) - - -def test_get_team_rest_flattened_error(transport: str = "rest"): - client = TeamServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_team( - team_service.GetTeamRequest(), - name="name_value", - ) - - -def test_get_team_rest_error(): - client = TeamServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - team_service.ListTeamsRequest, - dict, - ], -) -def test_list_teams_rest(request_type): - client = TeamServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "networks/sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = team_service.ListTeamsResponse( - next_page_token="next_page_token_value", - total_size=1086, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = team_service.ListTeamsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_teams(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTeamsPager) - assert response.next_page_token == "next_page_token_value" - assert response.total_size == 1086 - - -def test_list_teams_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = TeamServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_teams in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.list_teams] = mock_rpc - - request = {} - client.list_teams(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_teams(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_teams_rest_required_fields(request_type=team_service.ListTeamsRequest): - transport_class = transports.TeamServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_teams._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_teams._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - "skip", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = TeamServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = team_service.ListTeamsResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = team_service.ListTeamsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_teams(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_teams_rest_unset_required_fields(): - transport = transports.TeamServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_teams._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - "skip", - ) - ) - & set(("parent",)) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_teams_rest_interceptors(null_interceptor): - transport = transports.TeamServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.TeamServiceRestInterceptor(), - ) - client = TeamServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, 
"transcode" - ) as transcode, mock.patch.object( - transports.TeamServiceRestInterceptor, "post_list_teams" - ) as post, mock.patch.object( - transports.TeamServiceRestInterceptor, "pre_list_teams" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = team_service.ListTeamsRequest.pb(team_service.ListTeamsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = team_service.ListTeamsResponse.to_json( - team_service.ListTeamsResponse() - ) - - request = team_service.ListTeamsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = team_service.ListTeamsResponse() - - client.list_teams( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_teams_rest_bad_request( - transport: str = "rest", request_type=team_service.ListTeamsRequest -): - client = TeamServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "networks/sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_teams(request) - - -def test_list_teams_rest_flattened(): - client = TeamServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = team_service.ListTeamsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "networks/sample1"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = team_service.ListTeamsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_teams(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=networks/*}/teams" % client.transport._host, args[1] - ) - - -def test_list_teams_rest_flattened_error(transport: str = "rest"): - client = TeamServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_teams( - team_service.ListTeamsRequest(), - parent="parent_value", - ) - - -def test_list_teams_rest_pager(transport: str = "rest"): - client = TeamServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - team_service.ListTeamsResponse( - teams=[ - team_service.Team(), - team_service.Team(), - team_service.Team(), - ], - next_page_token="abc", - ), - team_service.ListTeamsResponse( - teams=[], - next_page_token="def", - ), - team_service.ListTeamsResponse( - teams=[ - team_service.Team(), - ], - next_page_token="ghi", - ), - team_service.ListTeamsResponse( - teams=[ - team_service.Team(), - team_service.Team(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(team_service.ListTeamsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "networks/sample1"} - - pager = client.list_teams(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, team_service.Team) for i in results) - - pages = list(client.list_teams(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. 
- transport = transports.TeamServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = TeamServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.TeamServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = TeamServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.TeamServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = TeamServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = TeamServiceClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.TeamServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = TeamServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.TeamServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = TeamServiceClient(transport=transport) - assert client.transport is transport - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.TeamServiceRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - - -@pytest.mark.parametrize( - "transport_name", - [ - "rest", - ], -) -def test_transport_kind(transport_name): - transport = TeamServiceClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - - -def test_team_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.TeamServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", - ) - - -def test_team_service_base_transport(): - # Instantiate the base transport. - with mock.patch( - "google.ads.admanager_v1.services.team_service.transports.TeamServiceTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.TeamServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - "get_team", - "list_teams", - "get_operation", - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - "kind", - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_team_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.ads.admanager_v1.services.team_service.transports.TeamServiceTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.TeamServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=None, - default_scopes=(), - quota_project_id="octopus", - ) - - -def test_team_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.ads.admanager_v1.services.team_service.transports.TeamServiceTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.TeamServiceTransport() - adc.assert_called_once() - - -def test_team_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - TeamServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=(), - quota_project_id=None, - ) - - -def test_team_service_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch( - "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" - ) as mock_configure_mtls_channel: - transports.TeamServiceRestTransport( - credentials=cred, client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize( - "transport_name", - [ - "rest", - ], -) -def test_team_service_host_no_port(transport_name): - client = TeamServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="admanager.googleapis.com" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "admanager.googleapis.com:443" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://admanager.googleapis.com" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "rest", - ], -) -def test_team_service_host_with_port(transport_name): - client = TeamServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="admanager.googleapis.com:8000" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "admanager.googleapis.com:8000" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://admanager.googleapis.com:8000" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "rest", - ], -) -def test_team_service_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = TeamServiceClient( - 
credentials=creds1, - transport=transport_name, - ) - client2 = TeamServiceClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.get_team._session - session2 = client2.transport.get_team._session - assert session1 != session2 - session1 = client1.transport.list_teams._session - session2 = client2.transport.list_teams._session - assert session1 != session2 - - -def test_network_path(): - network_code = "squid" - expected = "networks/{network_code}".format( - network_code=network_code, - ) - actual = TeamServiceClient.network_path(network_code) - assert expected == actual - - -def test_parse_network_path(): - expected = { - "network_code": "clam", - } - path = TeamServiceClient.network_path(**expected) - - # Check that the path construction is reversible. - actual = TeamServiceClient.parse_network_path(path) - assert expected == actual - - -def test_team_path(): - network_code = "whelk" - team = "octopus" - expected = "networks/{network_code}/teams/{team}".format( - network_code=network_code, - team=team, - ) - actual = TeamServiceClient.team_path(network_code, team) - assert expected == actual - - -def test_parse_team_path(): - expected = { - "network_code": "oyster", - "team": "nudibranch", - } - path = TeamServiceClient.team_path(**expected) - - # Check that the path construction is reversible. - actual = TeamServiceClient.parse_team_path(path) - assert expected == actual - - -def test_common_billing_account_path(): - billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format( - billing_account=billing_account, - ) - actual = TeamServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "mussel", - } - path = TeamServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. 
- actual = TeamServiceClient.parse_common_billing_account_path(path) - assert expected == actual - - -def test_common_folder_path(): - folder = "winkle" - expected = "folders/{folder}".format( - folder=folder, - ) - actual = TeamServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nautilus", - } - path = TeamServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = TeamServiceClient.parse_common_folder_path(path) - assert expected == actual - - -def test_common_organization_path(): - organization = "scallop" - expected = "organizations/{organization}".format( - organization=organization, - ) - actual = TeamServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "abalone", - } - path = TeamServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = TeamServiceClient.parse_common_organization_path(path) - assert expected == actual - - -def test_common_project_path(): - project = "squid" - expected = "projects/{project}".format( - project=project, - ) - actual = TeamServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "clam", - } - path = TeamServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. 
- actual = TeamServiceClient.parse_common_project_path(path) - assert expected == actual - - -def test_common_location_path(): - project = "whelk" - location = "octopus" - expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, - ) - actual = TeamServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - } - path = TeamServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = TeamServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object( - transports.TeamServiceTransport, "_prep_wrapped_messages" - ) as prep: - client = TeamServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object( - transports.TeamServiceTransport, "_prep_wrapped_messages" - ) as prep: - transport_class = TeamServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_get_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.GetOperationRequest -): - client = TeamServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "networks/sample1/operations/reports/exports/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.GetOperationRequest, - dict, - ], -) -def test_get_operation_rest(request_type): - client = TeamServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "networks/sample1/operations/reports/exports/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.Operation) - - -def test_transport_close(): - transports = { - "rest": "_session", - } - - for transport, close_name in transports.items(): - client = TeamServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport - ) - with mock.patch.object( - type(getattr(client.transport, close_name)), "close" - ) as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - "rest", - ] - for transport in transports: - client = TeamServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport - ) - # Test client calls underlying transport. - with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - - -@pytest.mark.parametrize( - "client_class,transport_class", - [ - (TeamServiceClient, transports.TeamServiceRestTransport), - ], -) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_user_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_user_service.py 
index a51726932d23..80cbbb9f0809 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_user_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_user_service.py @@ -44,12 +44,8 @@ from requests import PreparedRequest, Request, Response from requests.sessions import Session -from google.ads.admanager_v1.services.user_service import ( - UserServiceClient, - pagers, - transports, -) -from google.ads.admanager_v1.types import user_service +from google.ads.admanager_v1.services.user_service import UserServiceClient, transports +from google.ads.admanager_v1.types import user_messages, user_service def client_cert_source_callback(): @@ -951,7 +947,7 @@ def test_get_user_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = user_service.User( + return_value = user_messages.User( name="name_value", user_id=747, display_name="display_name_value", @@ -967,7 +963,7 @@ def test_get_user_rest(request_type): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = user_service.User.pb(return_value) + return_value = user_messages.User.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") @@ -975,7 +971,7 @@ def test_get_user_rest(request_type): response = client.get_user(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, user_service.User) + assert isinstance(response, user_messages.User) assert response.name == "name_value" assert response.user_id == 747 assert response.display_name == "display_name_value" @@ -1061,7 +1057,7 @@ def test_get_user_rest_required_fields(request_type=user_service.GetUserRequest) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = user_service.User() + return_value = user_messages.User() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -1082,7 +1078,7 @@ def test_get_user_rest_required_fields(request_type=user_service.GetUserRequest) response_value.status_code = 200 # Convert return value to protobuf type - return_value = user_service.User.pb(return_value) + return_value = user_messages.User.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") @@ -1135,7 +1131,7 @@ def test_get_user_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = user_service.User.to_json(user_service.User()) + req.return_value._content = user_messages.User.to_json(user_messages.User()) request = user_service.GetUserRequest() metadata = [ @@ -1143,7 +1139,7 @@ def test_get_user_rest_interceptors(null_interceptor): ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = user_service.User() + post.return_value = user_messages.User() client.get_user( request, @@ -1190,7 +1186,7 @@ def test_get_user_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = user_service.User() + return_value = user_messages.User() # get arguments that satisfy an http rule for this method sample_request = {"name": "networks/sample1/users/sample2"} @@ -1205,7 +1201,7 @@ def test_get_user_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = user_service.User.pb(return_value) + return_value = user_messages.User.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1242,381 +1238,6 @@ def test_get_user_rest_error(): ) -@pytest.mark.parametrize( - "request_type", - [ - user_service.ListUsersRequest, - dict, - ], -) -def test_list_users_rest(request_type): - client = UserServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "networks/sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = user_service.ListUsersResponse( - next_page_token="next_page_token_value", - total_size=1086, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = user_service.ListUsersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_users(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListUsersPager) - assert response.next_page_token == "next_page_token_value" - assert response.total_size == 1086 - - -def test_list_users_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = UserServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_users in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.list_users] = mock_rpc - - request = {} - client.list_users(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_users(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_users_rest_required_fields(request_type=user_service.ListUsersRequest): - transport_class = transports.UserServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_users._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_users._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - "skip", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = UserServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = user_service.ListUsersResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = user_service.ListUsersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_users(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_users_rest_unset_required_fields(): - transport = transports.UserServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_users._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - "skip", - ) - ) - & set(("parent",)) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_users_rest_interceptors(null_interceptor): - transport = transports.UserServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.UserServiceRestInterceptor(), - ) - client = UserServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, 
"transcode" - ) as transcode, mock.patch.object( - transports.UserServiceRestInterceptor, "post_list_users" - ) as post, mock.patch.object( - transports.UserServiceRestInterceptor, "pre_list_users" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = user_service.ListUsersRequest.pb(user_service.ListUsersRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = user_service.ListUsersResponse.to_json( - user_service.ListUsersResponse() - ) - - request = user_service.ListUsersRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = user_service.ListUsersResponse() - - client.list_users( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_users_rest_bad_request( - transport: str = "rest", request_type=user_service.ListUsersRequest -): - client = UserServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "networks/sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_users(request) - - -def test_list_users_rest_flattened(): - client = UserServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = user_service.ListUsersResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "networks/sample1"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = user_service.ListUsersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_users(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=networks/*}/users" % client.transport._host, args[1] - ) - - -def test_list_users_rest_flattened_error(transport: str = "rest"): - client = UserServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_users( - user_service.ListUsersRequest(), - parent="parent_value", - ) - - -def test_list_users_rest_pager(transport: str = "rest"): - client = UserServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - user_service.ListUsersResponse( - users=[ - user_service.User(), - user_service.User(), - user_service.User(), - ], - next_page_token="abc", - ), - user_service.ListUsersResponse( - users=[], - next_page_token="def", - ), - user_service.ListUsersResponse( - users=[ - user_service.User(), - ], - next_page_token="ghi", - ), - user_service.ListUsersResponse( - users=[ - user_service.User(), - user_service.User(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(user_service.ListUsersResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "networks/sample1"} - - pager = client.list_users(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, user_service.User) for i in results) - - pages = list(client.list_users(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.UserServiceRestTransport( @@ -1728,7 +1349,6 @@ def test_user_service_base_transport(): # raise NotImplementedError. methods = ( "get_user", - "list_users", "get_operation", ) for method in methods: @@ -1864,34 +1484,11 @@ def test_user_service_client_transport_session_collision(transport_name): session1 = client1.transport.get_user._session session2 = client2.transport.get_user._session assert session1 != session2 - session1 = client1.transport.list_users._session - session2 = client2.transport.list_users._session - assert session1 != session2 - - -def test_network_path(): - network_code = "squid" - expected = "networks/{network_code}".format( - network_code=network_code, - ) - actual = UserServiceClient.network_path(network_code) - assert expected == actual - - -def test_parse_network_path(): - expected = { - "network_code": "clam", - } - path = UserServiceClient.network_path(**expected) - - # Check that the path construction is reversible. - actual = UserServiceClient.parse_network_path(path) - assert expected == actual def test_role_path(): - network_code = "whelk" - role = "octopus" + network_code = "squid" + role = "clam" expected = "networks/{network_code}/roles/{role}".format( network_code=network_code, role=role, @@ -1902,8 +1499,8 @@ def test_role_path(): def test_parse_role_path(): expected = { - "network_code": "oyster", - "role": "nudibranch", + "network_code": "whelk", + "role": "octopus", } path = UserServiceClient.role_path(**expected) @@ -1913,8 +1510,8 @@ def test_parse_role_path(): def test_user_path(): - network_code = "cuttlefish" - user = "mussel" + network_code = "oyster" + user = "nudibranch" expected = "networks/{network_code}/users/{user}".format( network_code=network_code, user=user, @@ -1925,8 +1522,8 @@ def test_user_path(): def test_parse_user_path(): expected = { - "network_code": "winkle", - "user": "nautilus", + "network_code": "cuttlefish", + "user": "mussel", } path = UserServiceClient.user_path(**expected) 
@@ -1936,7 +1533,7 @@ def test_parse_user_path(): def test_common_billing_account_path(): - billing_account = "scallop" + billing_account = "winkle" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -1946,7 +1543,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "abalone", + "billing_account": "nautilus", } path = UserServiceClient.common_billing_account_path(**expected) @@ -1956,7 +1553,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "squid" + folder = "scallop" expected = "folders/{folder}".format( folder=folder, ) @@ -1966,7 +1563,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "clam", + "folder": "abalone", } path = UserServiceClient.common_folder_path(**expected) @@ -1976,7 +1573,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "whelk" + organization = "squid" expected = "organizations/{organization}".format( organization=organization, ) @@ -1986,7 +1583,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "octopus", + "organization": "clam", } path = UserServiceClient.common_organization_path(**expected) @@ -1996,7 +1593,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "oyster" + project = "whelk" expected = "projects/{project}".format( project=project, ) @@ -2006,7 +1603,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "nudibranch", + "project": "octopus", } path = UserServiceClient.common_project_path(**expected) @@ -2016,8 +1613,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "cuttlefish" - location = "mussel" + project = "oyster" + location = "nudibranch" expected = 
"projects/{project}/locations/{location}".format( project=project, location=location, @@ -2028,8 +1625,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "winkle", - "location": "nautilus", + "project": "cuttlefish", + "location": "mussel", } path = UserServiceClient.common_location_path(**expected) @@ -2071,7 +1668,7 @@ def test_get_operation_rest_bad_request( request = request_type() request = json_format.ParseDict( - {"name": "networks/sample1/operations/reports/exports/sample2"}, request + {"name": "networks/sample1/operations/reports/runs/sample2"}, request ) # Mock the http request call within the method and fake a BadRequest error. @@ -2098,7 +1695,7 @@ def test_get_operation_rest(request_type): credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {"name": "networks/sample1/operations/reports/exports/sample2"} + request_init = {"name": "networks/sample1/operations/reports/runs/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: diff --git a/packages/google-ads-marketingplatform-admin/.OwlBot.yaml b/packages/google-ads-marketingplatform-admin/.OwlBot.yaml new file mode 100644 index 000000000000..d397bf3a63bd --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/.OwlBot.yaml @@ -0,0 +1,18 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +deep-copy-regex: + - source: /google/marketingplatform/admin/(v.*)/.*-py + dest: /owl-bot-staging/google-ads-marketingplatform-admin/$1 +api-name: google-ads-marketingplatform-admin diff --git a/packages/google-ads-marketingplatform-admin/.coveragerc b/packages/google-ads-marketingplatform-admin/.coveragerc new file mode 100644 index 000000000000..f2b0df425e8e --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/ads/marketingplatform_admin/__init__.py + google/ads/marketingplatform_admin/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-ads-marketingplatform-admin/.flake8 b/packages/google-ads-marketingplatform-admin/.flake8 new file mode 100644 index 000000000000..87f6e408c47d --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. 
+ **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-ads-marketingplatform-admin/.gitignore b/packages/google-ads-marketingplatform-admin/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. +pylintrc +pylintrc.test diff --git a/packages/google-ads-marketingplatform-admin/.repo-metadata.json b/packages/google-ads-marketingplatform-admin/.repo-metadata.json new file mode 100644 index 000000000000..094cd0a04207 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "google-ads-marketingplatform-admin", + "name_pretty": "Google Marketing Platform Admin API", + "api_description": "The Google Marketing Platform Admin API allows for programmatic access to the Google Marketing Platform configuration data. 
You can use the Google Marketing Platform Admin API to manage links between your Google Marketing Platform organization and Google Analytics accounts, and to set the service level of your GA4 properties.", + "product_documentation": "https://developers.google.com/analytics/devguides/config/gmp/v1", + "client_documentation": "https://googleapis.dev/python/google-ads-marketingplatform-admin/latest", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "release_level": "preview", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-ads-marketingplatform-admin", + "api_id": "marketingplatformadmin.googleapis.com", + "default_version": "v1alpha", + "codeowner_team": "", + "api_shortname": "marketingplatformadmin" +} diff --git a/packages/google-ads-marketingplatform-admin/CHANGELOG.md b/packages/google-ads-marketingplatform-admin/CHANGELOG.md new file mode 100644 index 000000000000..f8676c0292af --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/CHANGELOG.md @@ -0,0 +1,10 @@ +# Changelog + +## 0.1.0 (2024-09-05) + + +### Features + +* add initial files for google.marketingplatform.admin.v1alpha ([#13060](https://github.com/googleapis/google-cloud-python/issues/13060)) ([2bbab3b](https://github.com/googleapis/google-cloud-python/commit/2bbab3bea1548fdb6200856ffe4a6fe7d6bf8487)) + +## Changelog diff --git a/packages/google-ads-marketingplatform-admin/CODE_OF_CONDUCT.md b/packages/google-ads-marketingplatform-admin/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body 
+size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. 
Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. 
In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-ads-marketingplatform-admin/CONTRIBUTING.rst b/packages/google-ads-marketingplatform-admin/CONTRIBUTING.rst new file mode 100644 index 000000000000..a7223a5e0b19 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/CONTRIBUTING.rst @@ -0,0 +1,271 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10, 3.11 and 3.12 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. 
+ +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/google-cloud-python.git + # fetch and merge changes from upstream into main + $ git fetch upstream + $ git merge upstream/main + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. _repo: https://github.com/googleapis/google-cloud-python + +Using ``nox`` +============= + +We use `nox `__ to instrument our tests. + +- To test your changes, run unit tests with ``nox``:: + $ nox -s unit + +- To run a single unit test:: + + $ nox -s unit-3.12 -- -k + + + .. note:: + + The unit tests and system tests are described in the + ``noxfile.py`` files in each directory. + +.. nox: https://pypi.org/project/nox/ + +***************************************** +I'm getting weird errors... Can you help? +***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. 
Run via:: + + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration. + If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="main" + + By doing this, you are specifying the location of the most up-to-date + version of ``google-cloud-python``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). + +- This repository contains configuration for the + `pre-commit `__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. + Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + # Run all system tests + $ nox -s system + + # Run a single system test + $ nox -s system-3.12 -- -k + + + .. note:: + + System tests are only configured to run under Python 3.8, 3.9, 3.10, 3.11 and 3.12. + For expediency, we do not run them in older versions of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. 
Some tests require a service account. For those tests see `Authenticating as a service account `__. + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. + +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature. + +Build the docs via: + + $ nox -s docs + +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` catalogue. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. + +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. 
``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-ads-marketingplatform-admin + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.7`_ +- `Python 3.8`_ +- `Python 3.9`_ +- `Python 3.10`_ +- `Python 3.11`_ +- `Python 3.12`_ + +.. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ +.. _Python 3.12: https://docs.python.org/3.12/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-ads-marketingplatform-admin/noxfile.py + + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. + +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA `__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA `__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. 
diff --git a/packages/google-ads-marketingplatform-admin/LICENSE b/packages/google-ads-marketingplatform-admin/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-ads-marketingplatform-admin/MANIFEST.in b/packages/google-ads-marketingplatform-admin/MANIFEST.in new file mode 100644 index 000000000000..e0a66705318e --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-ads-marketingplatform-admin/README.rst b/packages/google-ads-marketingplatform-admin/README.rst new file mode 100644 index 000000000000..0c99d23ee7c1 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/README.rst @@ -0,0 +1,108 @@ +Python Client for Google Marketing Platform Admin API +===================================================== + +|preview| |pypi| |versions| + +`Google Marketing Platform Admin API`_: The Google Marketing Platform Admin API allows for programmatic access to the Google Marketing Platform configuration data. You can use the Google Marketing Platform Admin API to manage links between your Google Marketing Platform organization and Google Analytics accounts, and to set the service level of your GA4 properties. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-ads-marketingplatform-admin.svg + :target: https://pypi.org/project/google-ads-marketingplatform-admin/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-ads-marketingplatform-admin.svg + :target: https://pypi.org/project/google-ads-marketingplatform-admin/ +.. _Google Marketing Platform Admin API: https://developers.google.com/analytics/devguides/config/gmp/v1 +.. _Client Library Documentation: https://googleapis.dev/python/google-ads-marketingplatform-admin/latest +.. 
_Product Documentation: https://developers.google.com/analytics/devguides/config/gmp/v1 + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Google Marketing Platform Admin API.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Google Marketing Platform Admin API.: https://developers.google.com/analytics/devguides/config/gmp/v1 +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. + +With `venv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`venv`: https://docs.python.org/3/library/venv.html + + +Code samples and snippets +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Code samples and snippets live in the `samples/`_ folder. + +.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ads-marketingplatform-admin/samples + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of +Python. + +Python >= 3.7 + +.. _active: https://devguide.python.org/devcycle/#in-development-main-branch +.. 
_maintenance: https://devguide.python.org/devcycle/#maintenance-branches + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.6 + +If you are using an `end-of-life`_ +version of Python, we recommend that you update as soon as possible to an actively supported version. + +.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv <your-env> + source <your-env>/bin/activate + pip install google-ads-marketingplatform-admin + + +Windows +^^^^^^^ + +.. code-block:: console + + py -m venv <your-env> + .\<your-env>\Scripts\activate + pip install google-ads-marketingplatform-admin + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for Google Marketing Platform Admin API + to see other available methods on the client. +- Read the `Google Marketing Platform Admin API Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `README`_ to see the full list of Cloud + APIs that we cover. + +.. _Google Marketing Platform Admin API Product documentation: https://developers.google.com/analytics/devguides/config/gmp/v1 +.. 
_README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-ads-marketingplatform-admin/docs/CHANGELOG.md b/packages/google-ads-marketingplatform-admin/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-ads-marketingplatform-admin/docs/README.rst b/packages/google-ads-marketingplatform-admin/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-ads-marketingplatform-admin/docs/_static/custom.css b/packages/google-ads-marketingplatform-admin/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-ads-marketingplatform-admin/docs/_templates/layout.html b/packages/google-ads-marketingplatform-admin/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+ {{ sidebar() }} + {%- block document %} +
+ {%- if render_sidebar %} +
+ {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + + {%- endif %} + {% endblock %} + +
+
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please + visit Python 2 support on Google Cloud. +
+ {% block body %} {% endblock %} +
+ + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} +
+ {%- endif %} +
+ {%- endblock %} +
+
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-ads-marketingplatform-admin/docs/conf.py b/packages/google-ads-marketingplatform-admin/docs/conf.py new file mode 100644 index 000000000000..5c68a3e1a72f --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-ads-marketingplatform-admin documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. 
+needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-ads-marketingplatform-admin" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. 
+# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. 
+html_theme_options = { + "description": "Google Cloud Client Libraries for google-ads-marketingplatform-admin", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. 
+# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-ads-marketingplatform-admin-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. 
+ # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-ads-marketingplatform-admin.tex", + "google-ads-marketingplatform-admin Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-ads-marketingplatform-admin", + "google-ads-marketingplatform-admin Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. 
List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-ads-marketingplatform-admin", + "google-ads-marketingplatform-admin Documentation", + author, + "google-ads-marketingplatform-admin", + "google-ads-marketingplatform-admin Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-ads-marketingplatform-admin/docs/index.rst b/packages/google-ads-marketingplatform-admin/docs/index.rst new file mode 100644 index 000000000000..b217f5c4b15d --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/docs/index.rst @@ -0,0 +1,23 @@ +.. include:: README.rst + +.. 
include:: multiprocessing.rst + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + marketingplatform_admin_v1alpha/services_ + marketingplatform_admin_v1alpha/types_ + + +Changelog +--------- + +For a list of all ``google-ads-marketingplatform-admin`` releases: + +.. toctree:: + :maxdepth: 2 + + CHANGELOG diff --git a/packages/google-ads-marketingplatform-admin/docs/marketingplatform_admin_v1alpha/marketingplatform_admin_service.rst b/packages/google-ads-marketingplatform-admin/docs/marketingplatform_admin_v1alpha/marketingplatform_admin_service.rst new file mode 100644 index 000000000000..938e180ef989 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/docs/marketingplatform_admin_v1alpha/marketingplatform_admin_service.rst @@ -0,0 +1,10 @@ +MarketingplatformAdminService +----------------------------------------------- + +.. automodule:: google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service + :members: + :inherited-members: + +.. automodule:: google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.pagers + :members: + :inherited-members: diff --git a/packages/google-ads-marketingplatform-admin/docs/marketingplatform_admin_v1alpha/services_.rst b/packages/google-ads-marketingplatform-admin/docs/marketingplatform_admin_v1alpha/services_.rst new file mode 100644 index 000000000000..427be3b19a50 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/docs/marketingplatform_admin_v1alpha/services_.rst @@ -0,0 +1,6 @@ +Services for Google Ads Marketingplatform Admin v1alpha API +=========================================================== +.. 
toctree:: + :maxdepth: 2 + + marketingplatform_admin_service diff --git a/packages/google-ads-marketingplatform-admin/docs/marketingplatform_admin_v1alpha/types_.rst b/packages/google-ads-marketingplatform-admin/docs/marketingplatform_admin_v1alpha/types_.rst new file mode 100644 index 000000000000..829ca4ea07f4 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/docs/marketingplatform_admin_v1alpha/types_.rst @@ -0,0 +1,6 @@ +Types for Google Ads Marketingplatform Admin v1alpha API +======================================================== + +.. automodule:: google.ads.marketingplatform_admin_v1alpha.types + :members: + :show-inheritance: diff --git a/packages/google-ads-marketingplatform-admin/docs/multiprocessing.rst b/packages/google-ads-marketingplatform-admin/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses the :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/__init__.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/__init__.py new file mode 100644 index 000000000000..56669ac018e6 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/__init__.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.ads.marketingplatform_admin import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.async_client import ( + MarketingplatformAdminServiceAsyncClient, +) +from google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.client import ( + MarketingplatformAdminServiceClient, +) +from google.ads.marketingplatform_admin_v1alpha.types.marketingplatform_admin import ( + AnalyticsServiceLevel, + CreateAnalyticsAccountLinkRequest, + DeleteAnalyticsAccountLinkRequest, + GetOrganizationRequest, + ListAnalyticsAccountLinksRequest, + ListAnalyticsAccountLinksResponse, + SetPropertyServiceLevelRequest, + SetPropertyServiceLevelResponse, +) +from google.ads.marketingplatform_admin_v1alpha.types.resources import ( + AnalyticsAccountLink, + LinkVerificationState, + Organization, +) + +__all__ = ( + "MarketingplatformAdminServiceClient", + "MarketingplatformAdminServiceAsyncClient", + "CreateAnalyticsAccountLinkRequest", + "DeleteAnalyticsAccountLinkRequest", + "GetOrganizationRequest", + "ListAnalyticsAccountLinksRequest", + "ListAnalyticsAccountLinksResponse", + "SetPropertyServiceLevelRequest", + "SetPropertyServiceLevelResponse", + "AnalyticsServiceLevel", + "AnalyticsAccountLink", + "Organization", + "LinkVerificationState", +) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/gapic_version.py 
b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/gapic_version.py new file mode 100644 index 000000000000..33d37a7b677b --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/py.typed b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/py.typed new file mode 100644 index 000000000000..4f4b168c56da --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-ads-marketingplatform-admin package uses inline types. 
diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/__init__.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/__init__.py new file mode 100644 index 000000000000..c04bf34623a8 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/__init__.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.ads.marketingplatform_admin_v1alpha import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.marketingplatform_admin_service import ( + MarketingplatformAdminServiceAsyncClient, + MarketingplatformAdminServiceClient, +) +from .types.marketingplatform_admin import ( + AnalyticsServiceLevel, + CreateAnalyticsAccountLinkRequest, + DeleteAnalyticsAccountLinkRequest, + GetOrganizationRequest, + ListAnalyticsAccountLinksRequest, + ListAnalyticsAccountLinksResponse, + SetPropertyServiceLevelRequest, + SetPropertyServiceLevelResponse, +) +from .types.resources import AnalyticsAccountLink, LinkVerificationState, Organization + +__all__ = ( + "MarketingplatformAdminServiceAsyncClient", + "AnalyticsAccountLink", + "AnalyticsServiceLevel", + "CreateAnalyticsAccountLinkRequest", + "DeleteAnalyticsAccountLinkRequest", + "GetOrganizationRequest", + "LinkVerificationState", + "ListAnalyticsAccountLinksRequest", + "ListAnalyticsAccountLinksResponse", + "MarketingplatformAdminServiceClient", + "Organization", + "SetPropertyServiceLevelRequest", + "SetPropertyServiceLevelResponse", +) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_metadata.json b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_metadata.json new file mode 100644 index 000000000000..8d346e91ed67 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_metadata.json @@ -0,0 +1,103 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.ads.marketingplatform_admin_v1alpha", + "protoPackage": "google.marketingplatform.admin.v1alpha", + "schema": "1.0", + "services": { + "MarketingplatformAdminService": { + "clients": { + "grpc": { + "libraryClient": "MarketingplatformAdminServiceClient", + "rpcs": { + 
"CreateAnalyticsAccountLink": { + "methods": [ + "create_analytics_account_link" + ] + }, + "DeleteAnalyticsAccountLink": { + "methods": [ + "delete_analytics_account_link" + ] + }, + "GetOrganization": { + "methods": [ + "get_organization" + ] + }, + "ListAnalyticsAccountLinks": { + "methods": [ + "list_analytics_account_links" + ] + }, + "SetPropertyServiceLevel": { + "methods": [ + "set_property_service_level" + ] + } + } + }, + "grpc-async": { + "libraryClient": "MarketingplatformAdminServiceAsyncClient", + "rpcs": { + "CreateAnalyticsAccountLink": { + "methods": [ + "create_analytics_account_link" + ] + }, + "DeleteAnalyticsAccountLink": { + "methods": [ + "delete_analytics_account_link" + ] + }, + "GetOrganization": { + "methods": [ + "get_organization" + ] + }, + "ListAnalyticsAccountLinks": { + "methods": [ + "list_analytics_account_links" + ] + }, + "SetPropertyServiceLevel": { + "methods": [ + "set_property_service_level" + ] + } + } + }, + "rest": { + "libraryClient": "MarketingplatformAdminServiceClient", + "rpcs": { + "CreateAnalyticsAccountLink": { + "methods": [ + "create_analytics_account_link" + ] + }, + "DeleteAnalyticsAccountLink": { + "methods": [ + "delete_analytics_account_link" + ] + }, + "GetOrganization": { + "methods": [ + "get_organization" + ] + }, + "ListAnalyticsAccountLinks": { + "methods": [ + "list_analytics_account_links" + ] + }, + "SetPropertyServiceLevel": { + "methods": [ + "set_property_service_level" + ] + } + } + } + } + } + } +} diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_version.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_version.py new file mode 100644 index 000000000000..33d37a7b677b --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache 
License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/py.typed b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/py.typed new file mode 100644 index 000000000000..4f4b168c56da --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-ads-marketingplatform-admin package uses inline types. diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/__init__.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/__init__.py similarity index 88% rename from packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/__init__.py rename to packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/__init__.py index 65fa5abb358e..8f6cf068242c 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/__init__.py +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/__init__.py @@ -13,6 +13,3 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from .client import CreativeServiceClient - -__all__ = ("CreativeServiceClient",) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/__init__.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/__init__.py new file mode 100644 index 000000000000..e634b30fd6a2 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import MarketingplatformAdminServiceAsyncClient +from .client import MarketingplatformAdminServiceClient + +__all__ = ( + "MarketingplatformAdminServiceClient", + "MarketingplatformAdminServiceAsyncClient", +) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/async_client.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/async_client.py new file mode 100644 index 000000000000..cc9647487d98 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/async_client.py @@ -0,0 +1,894 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ads.marketingplatform_admin_v1alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service import ( + pagers, +) +from google.ads.marketingplatform_admin_v1alpha.types import ( + marketingplatform_admin, + resources, +) + +from .client import MarketingplatformAdminServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, MarketingplatformAdminServiceTransport +from .transports.grpc_asyncio import MarketingplatformAdminServiceGrpcAsyncIOTransport + + +class MarketingplatformAdminServiceAsyncClient: + """Service Interface for the Google Marketing Platform Admin + API. + """ + + _client: MarketingplatformAdminServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = MarketingplatformAdminServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = MarketingplatformAdminServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ( + MarketingplatformAdminServiceClient._DEFAULT_ENDPOINT_TEMPLATE + ) + _DEFAULT_UNIVERSE = MarketingplatformAdminServiceClient._DEFAULT_UNIVERSE + + account_path = staticmethod(MarketingplatformAdminServiceClient.account_path) + parse_account_path = staticmethod( + MarketingplatformAdminServiceClient.parse_account_path + ) + analytics_account_link_path = staticmethod( + MarketingplatformAdminServiceClient.analytics_account_link_path + ) + parse_analytics_account_link_path = staticmethod( + MarketingplatformAdminServiceClient.parse_analytics_account_link_path + ) + organization_path = staticmethod( + MarketingplatformAdminServiceClient.organization_path + ) + parse_organization_path = staticmethod( + MarketingplatformAdminServiceClient.parse_organization_path + ) + property_path = staticmethod(MarketingplatformAdminServiceClient.property_path) + parse_property_path = staticmethod( + MarketingplatformAdminServiceClient.parse_property_path + ) + common_billing_account_path = staticmethod( + MarketingplatformAdminServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + MarketingplatformAdminServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod( + MarketingplatformAdminServiceClient.common_folder_path + ) + parse_common_folder_path = staticmethod( + MarketingplatformAdminServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + MarketingplatformAdminServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + MarketingplatformAdminServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod( + MarketingplatformAdminServiceClient.common_project_path + ) + parse_common_project_path = staticmethod( + 
MarketingplatformAdminServiceClient.parse_common_project_path + ) + common_location_path = staticmethod( + MarketingplatformAdminServiceClient.common_location_path + ) + parse_common_location_path = staticmethod( + MarketingplatformAdminServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MarketingplatformAdminServiceAsyncClient: The constructed client. + """ + return MarketingplatformAdminServiceClient.from_service_account_info.__func__(MarketingplatformAdminServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MarketingplatformAdminServiceAsyncClient: The constructed client. + """ + return MarketingplatformAdminServiceClient.from_service_account_file.__func__(MarketingplatformAdminServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. 
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one. + (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return MarketingplatformAdminServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> MarketingplatformAdminServiceTransport: + """Returns the transport used by the client instance. + + Returns: + MarketingplatformAdminServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance.
+ """ + return self._client._universe_domain + + get_transport_class = MarketingplatformAdminServiceClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + MarketingplatformAdminServiceTransport, + Callable[..., MarketingplatformAdminServiceTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the marketingplatform admin service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,MarketingplatformAdminServiceTransport,Callable[..., MarketingplatformAdminServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the MarketingplatformAdminServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = MarketingplatformAdminServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def get_organization( + self, + request: Optional[ + Union[marketingplatform_admin.GetOrganizationRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Organization: + r"""Lookup for a single organization. 
+ + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import marketingplatform_admin_v1alpha + + async def sample_get_organization(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.GetOrganizationRequest( + name="name_value", + ) + + # Make the request + response = await client.get_organization(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ads.marketingplatform_admin_v1alpha.types.GetOrganizationRequest, dict]]): + The request object. Request message for GetOrganization + RPC. + name (:class:`str`): + Required. The name of the Organization to retrieve. + Format: organizations/{org_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.marketingplatform_admin_v1alpha.types.Organization: + A resource message representing a + Google Marketing Platform organization. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, marketingplatform_admin.GetOrganizationRequest): + request = marketingplatform_admin.GetOrganizationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_organization + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_analytics_account_links( + self, + request: Optional[ + Union[marketingplatform_admin.ListAnalyticsAccountLinksRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAnalyticsAccountLinksAsyncPager: + r"""Lists the Google Analytics accounts link to the + specified Google Marketing Platform organization. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import marketingplatform_admin_v1alpha + + async def sample_list_analytics_account_links(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.ListAnalyticsAccountLinksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_analytics_account_links(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksRequest, dict]]): + The request object. Request message for + ListAnalyticsAccountLinks RPC. + parent (:class:`str`): + Required. The parent organization, which owns this + collection of Analytics account links. Format: + organizations/{org_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.pagers.ListAnalyticsAccountLinksAsyncPager: + Response message for + ListAnalyticsAccountLinks RPC. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, marketingplatform_admin.ListAnalyticsAccountLinksRequest + ): + request = marketingplatform_admin.ListAnalyticsAccountLinksRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_analytics_account_links + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListAnalyticsAccountLinksAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def create_analytics_account_link( + self, + request: Optional[ + Union[marketingplatform_admin.CreateAnalyticsAccountLinkRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + analytics_account_link: Optional[resources.AnalyticsAccountLink] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.AnalyticsAccountLink: + r"""Creates the link between the Analytics account and + the Google Marketing Platform organization. + + User needs to be an org user, and admin on the Analytics + account to create the link. If the account is already + linked to an organization, user needs to unlink the + account from the current organization, then try link + again. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import marketingplatform_admin_v1alpha + + async def sample_create_analytics_account_link(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient() + + # Initialize request argument(s) + analytics_account_link = marketingplatform_admin_v1alpha.AnalyticsAccountLink() + analytics_account_link.analytics_account = "analytics_account_value" + + request = marketingplatform_admin_v1alpha.CreateAnalyticsAccountLinkRequest( + parent="parent_value", + analytics_account_link=analytics_account_link, + ) + + # Make the request + response = await client.create_analytics_account_link(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ads.marketingplatform_admin_v1alpha.types.CreateAnalyticsAccountLinkRequest, dict]]): + The request object. Request message for + CreateAnalyticsAccountLink RPC. + parent (:class:`str`): + Required. The parent resource where this Analytics + account link will be created. Format: + organizations/{org_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + analytics_account_link (:class:`google.ads.marketingplatform_admin_v1alpha.types.AnalyticsAccountLink`): + Required. The Analytics account link + to create. + + This corresponds to the ``analytics_account_link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.ads.marketingplatform_admin_v1alpha.types.AnalyticsAccountLink: + A resource message representing the + link between a Google Analytics account + and a Google Marketing Platform + organization. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, analytics_account_link]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, marketingplatform_admin.CreateAnalyticsAccountLinkRequest + ): + request = marketingplatform_admin.CreateAnalyticsAccountLinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if analytics_account_link is not None: + request.analytics_account_link = analytics_account_link + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_analytics_account_link + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def delete_analytics_account_link( + self, + request: Optional[ + Union[marketingplatform_admin.DeleteAnalyticsAccountLinkRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the AnalyticsAccountLink, which detaches the + Analytics account from the Google Marketing Platform + organization. + + User needs to be an org user, and admin on the Analytics + account in order to delete the link. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import marketingplatform_admin_v1alpha + + async def sample_delete_analytics_account_link(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.DeleteAnalyticsAccountLinkRequest( + name="name_value", + ) + + # Make the request + await client.delete_analytics_account_link(request=request) + + Args: + request (Optional[Union[google.ads.marketingplatform_admin_v1alpha.types.DeleteAnalyticsAccountLinkRequest, dict]]): + The request object. Request message for + DeleteAnalyticsAccountLink RPC. + name (:class:`str`): + Required. The name of the Analytics account link to + delete. Format: + organizations/{org_id}/analyticsAccountLinks/{analytics_account_link_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, marketingplatform_admin.DeleteAnalyticsAccountLinkRequest + ): + request = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_analytics_account_link + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def set_property_service_level( + self, + request: Optional[ + Union[marketingplatform_admin.SetPropertyServiceLevelRequest, dict] + ] = None, + *, + analytics_account_link: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> marketingplatform_admin.SetPropertyServiceLevelResponse: + r"""Updates the service level for an Analytics property. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import marketingplatform_admin_v1alpha + + async def sample_set_property_service_level(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.SetPropertyServiceLevelRequest( + analytics_account_link="analytics_account_link_value", + analytics_property="analytics_property_value", + service_level="ANALYTICS_SERVICE_LEVEL_360", + ) + + # Make the request + response = await client.set_property_service_level(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ads.marketingplatform_admin_v1alpha.types.SetPropertyServiceLevelRequest, dict]]): + The request object. Request message for + SetPropertyServiceLevel RPC. + analytics_account_link (:class:`str`): + Required. The parent AnalyticsAccountLink scope where + this property is in. 
Format: + organizations/{org_id}/analyticsAccountLinks/{analytics_account_link_id} + + This corresponds to the ``analytics_account_link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.marketingplatform_admin_v1alpha.types.SetPropertyServiceLevelResponse: + Response message for + SetPropertyServiceLevel RPC. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([analytics_account_link]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, marketingplatform_admin.SetPropertyServiceLevelRequest + ): + request = marketingplatform_admin.SetPropertyServiceLevelRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if analytics_account_link is not None: + request.analytics_account_link = analytics_account_link + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.set_property_service_level + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("analytics_account_link", request.analytics_account_link),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "MarketingplatformAdminServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("MarketingplatformAdminServiceAsyncClient",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/client.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/client.py similarity index 59% rename from packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/client.py rename to packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/client.py index c04fdc539730..f31761153ab6 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/creative_service/client.py +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/client.py @@ -41,25 +41,29 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.oauth2 import service_account # type: ignore -from google.ads.admanager_v1 import gapic_version as package_version +from google.ads.marketingplatform_admin_v1alpha import gapic_version as package_version try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, 
object, None] # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - -from google.ads.admanager_v1.services.creative_service import pagers -from google.ads.admanager_v1.types import ad_partner_declaration, creative_service +from google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service import ( + pagers, +) +from google.ads.marketingplatform_admin_v1alpha.types import ( + marketingplatform_admin, + resources, +) -from .transports.base import DEFAULT_CLIENT_INFO, CreativeServiceTransport -from .transports.rest import CreativeServiceRestTransport +from .transports.base import DEFAULT_CLIENT_INFO, MarketingplatformAdminServiceTransport +from .transports.grpc import MarketingplatformAdminServiceGrpcTransport +from .transports.grpc_asyncio import MarketingplatformAdminServiceGrpcAsyncIOTransport +from .transports.rest import MarketingplatformAdminServiceRestTransport -class CreativeServiceClientMeta(type): - """Metaclass for the CreativeService client. +class MarketingplatformAdminServiceClientMeta(type): + """Metaclass for the MarketingplatformAdminService client. This provides class-level methods for building and retrieving support objects (e.g. 
transport) without polluting the client instance @@ -68,13 +72,17 @@ class CreativeServiceClientMeta(type): _transport_registry = ( OrderedDict() - ) # type: Dict[str, Type[CreativeServiceTransport]] - _transport_registry["rest"] = CreativeServiceRestTransport + ) # type: Dict[str, Type[MarketingplatformAdminServiceTransport]] + _transport_registry["grpc"] = MarketingplatformAdminServiceGrpcTransport + _transport_registry[ + "grpc_asyncio" + ] = MarketingplatformAdminServiceGrpcAsyncIOTransport + _transport_registry["rest"] = MarketingplatformAdminServiceRestTransport def get_transport_class( cls, label: Optional[str] = None, - ) -> Type[CreativeServiceTransport]: + ) -> Type[MarketingplatformAdminServiceTransport]: """Returns an appropriate transport class. Args: @@ -93,8 +101,12 @@ def get_transport_class( return next(iter(cls._transport_registry.values())) -class CreativeServiceClient(metaclass=CreativeServiceClientMeta): - """Provides methods for handling Creative objects.""" +class MarketingplatformAdminServiceClient( + metaclass=MarketingplatformAdminServiceClientMeta +): + """Service Interface for the Google Marketing Platform Admin + API. + """ @staticmethod def _get_default_mtls_endpoint(api_endpoint): @@ -127,12 +139,12 @@ def _get_default_mtls_endpoint(api_endpoint): return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = "admanager.googleapis.com" + DEFAULT_ENDPOINT = "marketingplatformadmin.googleapis.com" DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore DEFAULT_ENDPOINT ) - _DEFAULT_ENDPOINT_TEMPLATE = "admanager.{UNIVERSE_DOMAIN}" + _DEFAULT_ENDPOINT_TEMPLATE = "marketingplatformadmin.{UNIVERSE_DOMAIN}" _DEFAULT_UNIVERSE = "googleapis.com" @classmethod @@ -146,7 +158,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. 
Returns: - CreativeServiceClient: The constructed client. + MarketingplatformAdminServiceClient: The constructed client. """ credentials = service_account.Credentials.from_service_account_info(info) kwargs["credentials"] = credentials @@ -164,7 +176,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - CreativeServiceClient: The constructed client. + MarketingplatformAdminServiceClient: The constructed client. """ credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials @@ -173,85 +185,78 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file @property - def transport(self) -> CreativeServiceTransport: + def transport(self) -> MarketingplatformAdminServiceTransport: """Returns the transport used by the client instance. Returns: - CreativeServiceTransport: The transport used by the client + MarketingplatformAdminServiceTransport: The transport used by the client instance. 
""" return self._transport @staticmethod - def ad_partner_path( - network_code: str, - ad_partner: str, + def account_path( + account: str, ) -> str: - """Returns a fully-qualified ad_partner string.""" - return "networks/{network_code}/adPartners/{ad_partner}".format( - network_code=network_code, - ad_partner=ad_partner, + """Returns a fully-qualified account string.""" + return "accounts/{account}".format( + account=account, ) @staticmethod - def parse_ad_partner_path(path: str) -> Dict[str, str]: - """Parses a ad_partner path into its component segments.""" - m = re.match( - r"^networks/(?P.+?)/adPartners/(?P.+?)$", path - ) + def parse_account_path(path: str) -> Dict[str, str]: + """Parses a account path into its component segments.""" + m = re.match(r"^accounts/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def company_path( - network_code: str, - company: str, + def analytics_account_link_path( + organization: str, + analytics_account_link: str, ) -> str: - """Returns a fully-qualified company string.""" - return "networks/{network_code}/companies/{company}".format( - network_code=network_code, - company=company, + """Returns a fully-qualified analytics_account_link string.""" + return "organizations/{organization}/analyticsAccountLinks/{analytics_account_link}".format( + organization=organization, + analytics_account_link=analytics_account_link, ) @staticmethod - def parse_company_path(path: str) -> Dict[str, str]: - """Parses a company path into its component segments.""" + def parse_analytics_account_link_path(path: str) -> Dict[str, str]: + """Parses a analytics_account_link path into its component segments.""" m = re.match( - r"^networks/(?P.+?)/companies/(?P.+?)$", path + r"^organizations/(?P.+?)/analyticsAccountLinks/(?P.+?)$", + path, ) return m.groupdict() if m else {} @staticmethod - def creative_path( - network_code: str, - creative: str, + def organization_path( + organization: str, ) -> str: - """Returns a fully-qualified 
creative string.""" - return "networks/{network_code}/creatives/{creative}".format( - network_code=network_code, - creative=creative, + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, ) @staticmethod - def parse_creative_path(path: str) -> Dict[str, str]: - """Parses a creative path into its component segments.""" - m = re.match( - r"^networks/(?P.+?)/creatives/(?P.+?)$", path - ) + def parse_organization_path(path: str) -> Dict[str, str]: + """Parses a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def network_path( - network_code: str, + def property_path( + property: str, ) -> str: - """Returns a fully-qualified network string.""" - return "networks/{network_code}".format( - network_code=network_code, + """Returns a fully-qualified property string.""" + return "properties/{property}".format( + property=property, ) @staticmethod - def parse_network_path(path: str) -> Dict[str, str]: - """Parses a network path into its component segments.""" - m = re.match(r"^networks/(?P.+?)$", path) + def parse_property_path(path: str) -> Dict[str, str]: + """Parses a property path into its component segments.""" + m = re.match(r"^properties/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod @@ -473,15 +478,17 @@ def _get_api_endpoint( elif use_mtls_endpoint == "always" or ( use_mtls_endpoint == "auto" and client_cert_source ): - _default_universe = CreativeServiceClient._DEFAULT_UNIVERSE + _default_universe = MarketingplatformAdminServiceClient._DEFAULT_UNIVERSE if universe_domain != _default_universe: raise MutualTLSChannelError( f"mTLS is not supported in any universe other than {_default_universe}." 
) - api_endpoint = CreativeServiceClient.DEFAULT_MTLS_ENDPOINT + api_endpoint = MarketingplatformAdminServiceClient.DEFAULT_MTLS_ENDPOINT else: - api_endpoint = CreativeServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=universe_domain + api_endpoint = ( + MarketingplatformAdminServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) ) return api_endpoint @@ -501,7 +508,7 @@ def _get_universe_domain( Raises: ValueError: If the universe domain is an empty string. """ - universe_domain = CreativeServiceClient._DEFAULT_UNIVERSE + universe_domain = MarketingplatformAdminServiceClient._DEFAULT_UNIVERSE if client_universe_domain is not None: universe_domain = client_universe_domain elif universe_domain_env is not None: @@ -527,7 +534,7 @@ def _compare_universes( ValueError: when client_universe does not match the universe in credentials. """ - default_universe = CreativeServiceClient._DEFAULT_UNIVERSE + default_universe = MarketingplatformAdminServiceClient._DEFAULT_UNIVERSE credentials_universe = getattr(credentials, "universe_domain", default_universe) if client_universe != credentials_universe: @@ -551,7 +558,7 @@ def _validate_universe_domain(self): """ self._is_universe_domain_valid = ( self._is_universe_domain_valid - or CreativeServiceClient._compare_universes( + or MarketingplatformAdminServiceClient._compare_universes( self.universe_domain, self.transport._credentials ) ) @@ -581,13 +588,15 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, transport: Optional[ Union[ - str, CreativeServiceTransport, Callable[..., CreativeServiceTransport] + str, + MarketingplatformAdminServiceTransport, + Callable[..., MarketingplatformAdminServiceTransport], ] ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiates the creative service client. 
+ """Instantiates the marketingplatform admin service client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -595,10 +604,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Optional[Union[str,CreativeServiceTransport,Callable[..., CreativeServiceTransport]]]): + transport (Optional[Union[str,MarketingplatformAdminServiceTransport,Callable[..., MarketingplatformAdminServiceTransport]]]): The transport to use, or a Callable that constructs and returns a new transport. If a Callable is given, it will be called with the same set of initialization - arguments as used in the CreativeServiceTransport constructor. + arguments as used in the MarketingplatformAdminServiceTransport constructor. If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -651,12 +660,16 @@ def __init__( self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env, - ) = CreativeServiceClient._read_environment_variables() - self._client_cert_source = CreativeServiceClient._get_client_cert_source( - self._client_options.client_cert_source, self._use_client_cert + ) = MarketingplatformAdminServiceClient._read_environment_variables() + self._client_cert_source = ( + MarketingplatformAdminServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) ) - self._universe_domain = CreativeServiceClient._get_universe_domain( - universe_domain_opt, self._universe_domain_env + self._universe_domain = ( + MarketingplatformAdminServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) ) self._api_endpoint = None # updated below, depending on `transport` @@ -672,9 +685,11 @@ def __init__( # Save or instantiate the transport. 
# Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, CreativeServiceTransport) + transport_provided = isinstance( + transport, MarketingplatformAdminServiceTransport + ) if transport_provided: - # transport is a CreativeServiceTransport instance. + # transport is a MarketingplatformAdminServiceTransport instance. if credentials or self._client_options.credentials_file or api_key_value: raise ValueError( "When providing a transport instance, " @@ -685,12 +700,12 @@ def __init__( "When providing a transport instance, provide its scopes " "directly." ) - self._transport = cast(CreativeServiceTransport, transport) + self._transport = cast(MarketingplatformAdminServiceTransport, transport) self._api_endpoint = self._transport.host self._api_endpoint = ( self._api_endpoint - or CreativeServiceClient._get_api_endpoint( + or MarketingplatformAdminServiceClient._get_api_endpoint( self._client_options.api_endpoint, self._client_cert_source, self._universe_domain, @@ -709,11 +724,14 @@ def __init__( ) transport_init: Union[ - Type[CreativeServiceTransport], Callable[..., CreativeServiceTransport] + Type[MarketingplatformAdminServiceTransport], + Callable[..., MarketingplatformAdminServiceTransport], ] = ( - CreativeServiceClient.get_transport_class(transport) + MarketingplatformAdminServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None - else cast(Callable[..., CreativeServiceTransport], transport) + else cast( + Callable[..., MarketingplatformAdminServiceTransport], transport + ) ) # initialize with the provided callable or the passed in class self._transport = transport_init( @@ -728,16 +746,18 @@ def __init__( api_audience=self._client_options.api_audience, ) - def get_creative( + def get_organization( self, - request: Optional[Union[creative_service.GetCreativeRequest, dict]] = None, + request: 
Optional[ + Union[marketingplatform_admin.GetOrganizationRequest, dict] + ] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> creative_service.Creative: - r"""API to retrieve a Creative object. + ) -> resources.Organization: + r"""Lookup for a single organization. .. code-block:: python @@ -748,30 +768,30 @@ def get_creative( # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.ads import admanager_v1 + from google.ads import marketingplatform_admin_v1alpha - def sample_get_creative(): + def sample_get_organization(): # Create a client - client = admanager_v1.CreativeServiceClient() + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient() # Initialize request argument(s) - request = admanager_v1.GetCreativeRequest( + request = marketingplatform_admin_v1alpha.GetOrganizationRequest( name="name_value", ) # Make the request - response = client.get_creative(request=request) + response = client.get_organization(request=request) # Handle the response print(response) Args: - request (Union[google.ads.admanager_v1.types.GetCreativeRequest, dict]): - The request object. Request object for GetCreative - method. + request (Union[google.ads.marketingplatform_admin_v1alpha.types.GetOrganizationRequest, dict]): + The request object. Request message for GetOrganization + RPC. name (str): - Required. The resource name of the Creative. Format: - ``networks/{network_code}/creatives/{creative_id}`` + Required. The name of the Organization to retrieve. + Format: organizations/{org_id} This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -783,8 +803,10 @@ def sample_get_creative(): sent along with the request as metadata. 
Returns: - google.ads.admanager_v1.types.Creative: - The Creative resource. + google.ads.marketingplatform_admin_v1alpha.types.Organization: + A resource message representing a + Google Marketing Platform organization. + """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -798,8 +820,8 @@ def sample_get_creative(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, creative_service.GetCreativeRequest): - request = creative_service.GetCreativeRequest(request) + if not isinstance(request, marketingplatform_admin.GetOrganizationRequest): + request = marketingplatform_admin.GetOrganizationRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if name is not None: @@ -807,7 +829,7 @@ def sample_get_creative(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_creative] + rpc = self._transport._wrapped_methods[self._transport.get_organization] # Certain fields should be provided within the metadata header; # add these here. @@ -829,16 +851,19 @@ def sample_get_creative(): # Done; return the response. return response - def list_creatives( + def list_analytics_account_links( self, - request: Optional[Union[creative_service.ListCreativesRequest, dict]] = None, + request: Optional[ + Union[marketingplatform_admin.ListAnalyticsAccountLinksRequest, dict] + ] = None, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListCreativesPager: - r"""API to retrieve a list of Creative objects. 
+ ) -> pagers.ListAnalyticsAccountLinksPager: + r"""Lists the Google Analytics accounts link to the + specified Google Marketing Platform organization. .. code-block:: python @@ -849,31 +874,32 @@ def list_creatives( # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.ads import admanager_v1 + from google.ads import marketingplatform_admin_v1alpha - def sample_list_creatives(): + def sample_list_analytics_account_links(): # Create a client - client = admanager_v1.CreativeServiceClient() + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient() # Initialize request argument(s) - request = admanager_v1.ListCreativesRequest( + request = marketingplatform_admin_v1alpha.ListAnalyticsAccountLinksRequest( parent="parent_value", ) # Make the request - page_result = client.list_creatives(request=request) + page_result = client.list_analytics_account_links(request=request) # Handle the response for response in page_result: print(response) Args: - request (Union[google.ads.admanager_v1.types.ListCreativesRequest, dict]): - The request object. Request object for ListCreatives - method. + request (Union[google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksRequest, dict]): + The request object. Request message for + ListAnalyticsAccountLinks RPC. parent (str): - Required. The parent, which owns this collection of - Creatives. Format: networks/{network_code} + Required. The parent organization, which owns this + collection of Analytics account links. Format: + organizations/{org_id} This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -885,11 +911,9 @@ def sample_list_creatives(): sent along with the request as metadata. 
Returns: - google.ads.admanager_v1.services.creative_service.pagers.ListCreativesPager: - Response object for - ListCreativesRequest containing matching - Creative resources. - + google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.pagers.ListAnalyticsAccountLinksPager: + Response message for + ListAnalyticsAccountLinks RPC. Iterating over this object will yield results and resolve additional pages automatically. @@ -907,8 +931,10 @@ def sample_list_creatives(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, creative_service.ListCreativesRequest): - request = creative_service.ListCreativesRequest(request) + if not isinstance( + request, marketingplatform_admin.ListAnalyticsAccountLinksRequest + ): + request = marketingplatform_admin.ListAnalyticsAccountLinksRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: @@ -916,7 +942,9 @@ def sample_list_creatives(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_creatives] + rpc = self._transport._wrapped_methods[ + self._transport.list_analytics_account_links + ] # Certain fields should be provided within the metadata header; # add these here. @@ -937,7 +965,7 @@ def sample_list_creatives(): # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. - response = pagers.ListCreativesPager( + response = pagers.ListAnalyticsAccountLinksPager( method=rpc, request=request, response=response, @@ -949,55 +977,224 @@ def sample_list_creatives(): # Done; return the response. 
return response - def __enter__(self) -> "CreativeServiceClient": - return self + def create_analytics_account_link( + self, + request: Optional[ + Union[marketingplatform_admin.CreateAnalyticsAccountLinkRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + analytics_account_link: Optional[resources.AnalyticsAccountLink] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.AnalyticsAccountLink: + r"""Creates the link between the Analytics account and + the Google Marketing Platform organization. - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. + User needs to be an org user, and admin on the Analytics + account to create the link. If the account is already + linked to an organization, user needs to unlink the + account from the current organization, then try link + again. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import marketingplatform_admin_v1alpha + + def sample_create_analytics_account_link(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient() + + # Initialize request argument(s) + analytics_account_link = marketingplatform_admin_v1alpha.AnalyticsAccountLink() + analytics_account_link.analytics_account = "analytics_account_value" + + request = marketingplatform_admin_v1alpha.CreateAnalyticsAccountLinkRequest( + parent="parent_value", + analytics_account_link=analytics_account_link, + ) + + # Make the request + response = client.create_analytics_account_link(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ads.marketingplatform_admin_v1alpha.types.CreateAnalyticsAccountLinkRequest, dict]): + The request object. Request message for + CreateAnalyticsAccountLink RPC. + parent (str): + Required. The parent resource where this Analytics + account link will be created. Format: + organizations/{org_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + analytics_account_link (google.ads.marketingplatform_admin_v1alpha.types.AnalyticsAccountLink): + Required. The Analytics account link + to create. + + This corresponds to the ``analytics_account_link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.ads.marketingplatform_admin_v1alpha.types.AnalyticsAccountLink: + A resource message representing the + link between a Google Analytics account + and a Google Marketing Platform + organization. - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! """ - self.transport.close() + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, analytics_account_link]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) - def get_operation( + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, marketingplatform_admin.CreateAnalyticsAccountLinkRequest + ): + request = marketingplatform_admin.CreateAnalyticsAccountLinkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if analytics_account_link is not None: + request.analytics_account_link = analytics_account_link + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_analytics_account_link + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_analytics_account_link( self, - request: Optional[operations_pb2.GetOperationRequest] = None, + request: Optional[ + Union[marketingplatform_admin.DeleteAnalyticsAccountLinkRequest, dict] + ] = None, *, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. + ) -> None: + r"""Deletes the AnalyticsAccountLink, which detaches the + Analytics account from the Google Marketing Platform + organization. + + User needs to be an org user, and admin on the Analytics + account in order to delete the link. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import marketingplatform_admin_v1alpha + + def sample_delete_analytics_account_link(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.DeleteAnalyticsAccountLinkRequest( + name="name_value", + ) + + # Make the request + client.delete_analytics_account_link(request=request) Args: - request (:class:`~.operations_pb2.GetOperationRequest`): + request (Union[google.ads.marketingplatform_admin_v1alpha.types.DeleteAnalyticsAccountLinkRequest, dict]): The request object. Request message for - `GetOperation` method. 
- retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. + DeleteAnalyticsAccountLink RPC. + name (str): + Required. The name of the Analytics account link to + delete. Format: + organizations/{org_id}/analyticsAccountLinks/{analytics_account_link_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. """ # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, marketingplatform_admin.DeleteAnalyticsAccountLinkRequest + ): + request = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( - self._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[ + self._transport.delete_analytics_account_link + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1008,6 +1205,117 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def set_property_service_level( + self, + request: Optional[ + Union[marketingplatform_admin.SetPropertyServiceLevelRequest, dict] + ] = None, + *, + analytics_account_link: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> marketingplatform_admin.SetPropertyServiceLevelResponse: + r"""Updates the service level for an Analytics property. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import marketingplatform_admin_v1alpha + + def sample_set_property_service_level(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.SetPropertyServiceLevelRequest( + analytics_account_link="analytics_account_link_value", + analytics_property="analytics_property_value", + service_level="ANALYTICS_SERVICE_LEVEL_360", + ) + + # Make the request + response = client.set_property_service_level(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ads.marketingplatform_admin_v1alpha.types.SetPropertyServiceLevelRequest, dict]): + The request object. Request message for + SetPropertyServiceLevel RPC. + analytics_account_link (str): + Required. The parent AnalyticsAccountLink scope where + this property is in. Format: + organizations/{org_id}/analyticsAccountLinks/{analytics_account_link_id} + + This corresponds to the ``analytics_account_link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ads.marketingplatform_admin_v1alpha.types.SetPropertyServiceLevelResponse: + Response message for + SetPropertyServiceLevel RPC. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([analytics_account_link]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, marketingplatform_admin.SetPropertyServiceLevelRequest + ): + request = marketingplatform_admin.SetPropertyServiceLevelRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if analytics_account_link is not None: + request.analytics_account_link = analytics_account_link + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.set_property_service_level + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("analytics_account_link", request.analytics_account_link),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1019,10 +1327,23 @@ def get_operation( # Done; return the response. return response + def __enter__(self) -> "MarketingplatformAdminServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ ) -__all__ = ("CreativeServiceClient",) +__all__ = ("MarketingplatformAdminServiceClient",) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/pagers.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/pagers.py new file mode 100644 index 000000000000..bed8bd431770 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/pagers.py @@ -0,0 +1,208 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.ads.marketingplatform_admin_v1alpha.types import ( + marketingplatform_admin, + resources, +) + + +class ListAnalyticsAccountLinksPager: + """A pager for iterating through ``list_analytics_account_links`` requests. + + This class thinly wraps an initial + :class:`google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksResponse` object, and + provides an ``__iter__`` method to iterate through its + ``analytics_account_links`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListAnalyticsAccountLinks`` requests and continue to iterate + through the ``analytics_account_links`` field on the + corresponding responses. + + All the usual :class:`google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[ + ..., marketingplatform_admin.ListAnalyticsAccountLinksResponse + ], + request: marketingplatform_admin.ListAnalyticsAccountLinksRequest, + response: marketingplatform_admin.ListAnalyticsAccountLinksResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksRequest): + The initial request object. + response (google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = marketingplatform_admin.ListAnalyticsAccountLinksRequest( + request + ) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages( + self, + ) -> Iterator[marketingplatform_admin.ListAnalyticsAccountLinksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[resources.AnalyticsAccountLink]: + for page in self.pages: + yield from page.analytics_account_links + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListAnalyticsAccountLinksAsyncPager: + """A pager for iterating through ``list_analytics_account_links`` requests. + + This class thinly wraps an initial + :class:`google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``analytics_account_links`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListAnalyticsAccountLinks`` requests and continue to iterate + through the ``analytics_account_links`` field on the + corresponding responses. + + All the usual :class:`google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[marketingplatform_admin.ListAnalyticsAccountLinksResponse] + ], + request: marketingplatform_admin.ListAnalyticsAccountLinksRequest, + response: marketingplatform_admin.ListAnalyticsAccountLinksResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksRequest): + The initial request object. + response (google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = marketingplatform_admin.ListAnalyticsAccountLinksRequest( + request + ) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[marketingplatform_admin.ListAnalyticsAccountLinksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[resources.AnalyticsAccountLink]: + async def async_generator(): + async for page in self.pages: + for response in page.analytics_account_links: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/__init__.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/__init__.py new file mode 100644 index 000000000000..205d647ea99a --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import MarketingplatformAdminServiceTransport +from .grpc import MarketingplatformAdminServiceGrpcTransport +from .grpc_asyncio import MarketingplatformAdminServiceGrpcAsyncIOTransport +from .rest import ( + MarketingplatformAdminServiceRestInterceptor, + MarketingplatformAdminServiceRestTransport, +) + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[MarketingplatformAdminServiceTransport]] +_transport_registry["grpc"] = MarketingplatformAdminServiceGrpcTransport +_transport_registry["grpc_asyncio"] = MarketingplatformAdminServiceGrpcAsyncIOTransport +_transport_registry["rest"] = MarketingplatformAdminServiceRestTransport + +__all__ = ( + "MarketingplatformAdminServiceTransport", + "MarketingplatformAdminServiceGrpcTransport", + "MarketingplatformAdminServiceGrpcAsyncIOTransport", + "MarketingplatformAdminServiceRestTransport", + "MarketingplatformAdminServiceRestInterceptor", +) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/base.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/base.py new file mode 100644 index 000000000000..6f70b5c211e1 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/base.py @@ -0,0 +1,232 @@ 
+# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.ads.marketingplatform_admin_v1alpha import gapic_version as package_version +from google.ads.marketingplatform_admin_v1alpha.types import ( + marketingplatform_admin, + resources, +) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class MarketingplatformAdminServiceTransport(abc.ABC): + """Abstract transport class for MarketingplatformAdminService.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.read", + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.update", + ) + + DEFAULT_HOST: str = "marketingplatformadmin.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + 
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'marketingplatformadmin.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. 
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.get_organization: gapic_v1.method.wrap_method( + self.get_organization, + default_timeout=None, + client_info=client_info, + ), + self.list_analytics_account_links: gapic_v1.method.wrap_method( + self.list_analytics_account_links, + default_timeout=None, + client_info=client_info, + ), + self.create_analytics_account_link: gapic_v1.method.wrap_method( + self.create_analytics_account_link, + default_timeout=None, + client_info=client_info, + ), + self.delete_analytics_account_link: gapic_v1.method.wrap_method( + self.delete_analytics_account_link, + default_timeout=None, + client_info=client_info, + ), + self.set_property_service_level: gapic_v1.method.wrap_method( + self.set_property_service_level, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def get_organization( + self, + ) -> Callable[ + [marketingplatform_admin.GetOrganizationRequest], + Union[resources.Organization, Awaitable[resources.Organization]], + ]: + raise NotImplementedError() + + @property + def list_analytics_account_links( + self, + ) -> Callable[ + [marketingplatform_admin.ListAnalyticsAccountLinksRequest], + Union[ + marketingplatform_admin.ListAnalyticsAccountLinksResponse, + Awaitable[marketingplatform_admin.ListAnalyticsAccountLinksResponse], + ], + ]: + raise NotImplementedError() + + @property + def create_analytics_account_link( + self, + ) -> Callable[ + [marketingplatform_admin.CreateAnalyticsAccountLinkRequest], + Union[ + resources.AnalyticsAccountLink, Awaitable[resources.AnalyticsAccountLink] + ], + ]: + raise NotImplementedError() + + @property + def delete_analytics_account_link( + self, + ) -> Callable[ + [marketingplatform_admin.DeleteAnalyticsAccountLinkRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def set_property_service_level( + self, + ) -> Callable[ + [marketingplatform_admin.SetPropertyServiceLevelRequest], + Union[ + marketingplatform_admin.SetPropertyServiceLevelResponse, + Awaitable[marketingplatform_admin.SetPropertyServiceLevelResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("MarketingplatformAdminServiceTransport",) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/grpc.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/grpc.py new file mode 100644 index 000000000000..e8f3656c2d06 --- /dev/null +++ 
b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/grpc.py @@ -0,0 +1,412 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.ads.marketingplatform_admin_v1alpha.types import ( + marketingplatform_admin, + resources, +) + +from .base import DEFAULT_CLIENT_INFO, MarketingplatformAdminServiceTransport + + +class MarketingplatformAdminServiceGrpcTransport( + MarketingplatformAdminServiceTransport +): + """gRPC backend transport for MarketingplatformAdminService. + + Service Interface for the Google Marketing Platform Admin + API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "marketingplatformadmin.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'marketingplatformadmin.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. 
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "marketingplatformadmin.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def get_organization( + self, + ) -> Callable[ + [marketingplatform_admin.GetOrganizationRequest], resources.Organization + ]: + r"""Return a callable for the get organization method over gRPC. + + Lookup for a single organization. + + Returns: + Callable[[~.GetOrganizationRequest], + ~.Organization]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_organization" not in self._stubs: + self._stubs["get_organization"] = self.grpc_channel.unary_unary( + "/google.marketingplatform.admin.v1alpha.MarketingplatformAdminService/GetOrganization", + request_serializer=marketingplatform_admin.GetOrganizationRequest.serialize, + response_deserializer=resources.Organization.deserialize, + ) + return self._stubs["get_organization"] + + @property + def list_analytics_account_links( + self, + ) -> Callable[ + [marketingplatform_admin.ListAnalyticsAccountLinksRequest], + marketingplatform_admin.ListAnalyticsAccountLinksResponse, + ]: + r"""Return a callable for the list analytics account links method over gRPC. + + Lists the Google Analytics accounts link to the + specified Google Marketing Platform organization. + + Returns: + Callable[[~.ListAnalyticsAccountLinksRequest], + ~.ListAnalyticsAccountLinksResponse]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_analytics_account_links" not in self._stubs: + self._stubs["list_analytics_account_links"] = self.grpc_channel.unary_unary( + "/google.marketingplatform.admin.v1alpha.MarketingplatformAdminService/ListAnalyticsAccountLinks", + request_serializer=marketingplatform_admin.ListAnalyticsAccountLinksRequest.serialize, + response_deserializer=marketingplatform_admin.ListAnalyticsAccountLinksResponse.deserialize, + ) + return self._stubs["list_analytics_account_links"] + + @property + def create_analytics_account_link( + self, + ) -> Callable[ + [marketingplatform_admin.CreateAnalyticsAccountLinkRequest], + resources.AnalyticsAccountLink, + ]: + r"""Return a callable for the create analytics account link method over gRPC. + + Creates the link between the Analytics account and + the Google Marketing Platform organization. + + User needs to be an org user, and admin on the Analytics + account to create the link. If the account is already + linked to an organization, user needs to unlink the + account from the current organization, then try link + again. + + Returns: + Callable[[~.CreateAnalyticsAccountLinkRequest], + ~.AnalyticsAccountLink]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_analytics_account_link" not in self._stubs: + self._stubs[ + "create_analytics_account_link" + ] = self.grpc_channel.unary_unary( + "/google.marketingplatform.admin.v1alpha.MarketingplatformAdminService/CreateAnalyticsAccountLink", + request_serializer=marketingplatform_admin.CreateAnalyticsAccountLinkRequest.serialize, + response_deserializer=resources.AnalyticsAccountLink.deserialize, + ) + return self._stubs["create_analytics_account_link"] + + @property + def delete_analytics_account_link( + self, + ) -> Callable[ + [marketingplatform_admin.DeleteAnalyticsAccountLinkRequest], empty_pb2.Empty + ]: + r"""Return a callable for the delete analytics account link method over gRPC. + + Deletes the AnalyticsAccountLink, which detaches the + Analytics account from the Google Marketing Platform + organization. + + User needs to be an org user, and admin on the Analytics + account in order to delete the link. + + Returns: + Callable[[~.DeleteAnalyticsAccountLinkRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_analytics_account_link" not in self._stubs: + self._stubs[ + "delete_analytics_account_link" + ] = self.grpc_channel.unary_unary( + "/google.marketingplatform.admin.v1alpha.MarketingplatformAdminService/DeleteAnalyticsAccountLink", + request_serializer=marketingplatform_admin.DeleteAnalyticsAccountLinkRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_analytics_account_link"] + + @property + def set_property_service_level( + self, + ) -> Callable[ + [marketingplatform_admin.SetPropertyServiceLevelRequest], + marketingplatform_admin.SetPropertyServiceLevelResponse, + ]: + r"""Return a callable for the set property service level method over gRPC. 
+ + Updates the service level for an Analytics property. + + Returns: + Callable[[~.SetPropertyServiceLevelRequest], + ~.SetPropertyServiceLevelResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_property_service_level" not in self._stubs: + self._stubs["set_property_service_level"] = self.grpc_channel.unary_unary( + "/google.marketingplatform.admin.v1alpha.MarketingplatformAdminService/SetPropertyServiceLevel", + request_serializer=marketingplatform_admin.SetPropertyServiceLevelRequest.serialize, + response_deserializer=marketingplatform_admin.SetPropertyServiceLevelResponse.deserialize, + ) + return self._stubs["set_property_service_level"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("MarketingplatformAdminServiceGrpcTransport",) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/grpc_asyncio.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..caef725e70be --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/grpc_asyncio.py @@ -0,0 +1,444 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.ads.marketingplatform_admin_v1alpha.types import ( + marketingplatform_admin, + resources, +) + +from .base import DEFAULT_CLIENT_INFO, MarketingplatformAdminServiceTransport +from .grpc import MarketingplatformAdminServiceGrpcTransport + + +class MarketingplatformAdminServiceGrpcAsyncIOTransport( + MarketingplatformAdminServiceTransport +): + """gRPC AsyncIO backend transport for MarketingplatformAdminService. + + Service Interface for the Google Marketing Platform Admin + API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "marketingplatformadmin.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "marketingplatformadmin.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'marketingplatformadmin.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. 
+ channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the 
credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def get_organization( + self, + ) -> Callable[ + [marketingplatform_admin.GetOrganizationRequest], + Awaitable[resources.Organization], + ]: + r"""Return a callable for the get organization method over gRPC. + + Lookup for a single organization. + + Returns: + Callable[[~.GetOrganizationRequest], + Awaitable[~.Organization]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_organization" not in self._stubs: + self._stubs["get_organization"] = self.grpc_channel.unary_unary( + "/google.marketingplatform.admin.v1alpha.MarketingplatformAdminService/GetOrganization", + request_serializer=marketingplatform_admin.GetOrganizationRequest.serialize, + response_deserializer=resources.Organization.deserialize, + ) + return self._stubs["get_organization"] + + @property + def list_analytics_account_links( + self, + ) -> Callable[ + [marketingplatform_admin.ListAnalyticsAccountLinksRequest], + Awaitable[marketingplatform_admin.ListAnalyticsAccountLinksResponse], + ]: + r"""Return a callable for the list analytics account links method over gRPC. 
+ + Lists the Google Analytics accounts link to the + specified Google Marketing Platform organization. + + Returns: + Callable[[~.ListAnalyticsAccountLinksRequest], + Awaitable[~.ListAnalyticsAccountLinksResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_analytics_account_links" not in self._stubs: + self._stubs["list_analytics_account_links"] = self.grpc_channel.unary_unary( + "/google.marketingplatform.admin.v1alpha.MarketingplatformAdminService/ListAnalyticsAccountLinks", + request_serializer=marketingplatform_admin.ListAnalyticsAccountLinksRequest.serialize, + response_deserializer=marketingplatform_admin.ListAnalyticsAccountLinksResponse.deserialize, + ) + return self._stubs["list_analytics_account_links"] + + @property + def create_analytics_account_link( + self, + ) -> Callable[ + [marketingplatform_admin.CreateAnalyticsAccountLinkRequest], + Awaitable[resources.AnalyticsAccountLink], + ]: + r"""Return a callable for the create analytics account link method over gRPC. + + Creates the link between the Analytics account and + the Google Marketing Platform organization. + + User needs to be an org user, and admin on the Analytics + account to create the link. If the account is already + linked to an organization, user needs to unlink the + account from the current organization, then try link + again. + + Returns: + Callable[[~.CreateAnalyticsAccountLinkRequest], + Awaitable[~.AnalyticsAccountLink]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_analytics_account_link" not in self._stubs: + self._stubs[ + "create_analytics_account_link" + ] = self.grpc_channel.unary_unary( + "/google.marketingplatform.admin.v1alpha.MarketingplatformAdminService/CreateAnalyticsAccountLink", + request_serializer=marketingplatform_admin.CreateAnalyticsAccountLinkRequest.serialize, + response_deserializer=resources.AnalyticsAccountLink.deserialize, + ) + return self._stubs["create_analytics_account_link"] + + @property + def delete_analytics_account_link( + self, + ) -> Callable[ + [marketingplatform_admin.DeleteAnalyticsAccountLinkRequest], + Awaitable[empty_pb2.Empty], + ]: + r"""Return a callable for the delete analytics account link method over gRPC. + + Deletes the AnalyticsAccountLink, which detaches the + Analytics account from the Google Marketing Platform + organization. + + User needs to be an org user, and admin on the Analytics + account in order to delete the link. + + Returns: + Callable[[~.DeleteAnalyticsAccountLinkRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_analytics_account_link" not in self._stubs: + self._stubs[ + "delete_analytics_account_link" + ] = self.grpc_channel.unary_unary( + "/google.marketingplatform.admin.v1alpha.MarketingplatformAdminService/DeleteAnalyticsAccountLink", + request_serializer=marketingplatform_admin.DeleteAnalyticsAccountLinkRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_analytics_account_link"] + + @property + def set_property_service_level( + self, + ) -> Callable[ + [marketingplatform_admin.SetPropertyServiceLevelRequest], + Awaitable[marketingplatform_admin.SetPropertyServiceLevelResponse], + ]: + r"""Return a callable for the set property service level method over gRPC. + + Updates the service level for an Analytics property. + + Returns: + Callable[[~.SetPropertyServiceLevelRequest], + Awaitable[~.SetPropertyServiceLevelResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "set_property_service_level" not in self._stubs: + self._stubs["set_property_service_level"] = self.grpc_channel.unary_unary( + "/google.marketingplatform.admin.v1alpha.MarketingplatformAdminService/SetPropertyServiceLevel", + request_serializer=marketingplatform_admin.SetPropertyServiceLevelRequest.serialize, + response_deserializer=marketingplatform_admin.SetPropertyServiceLevelResponse.deserialize, + ) + return self._stubs["set_property_service_level"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.get_organization: gapic_v1.method_async.wrap_method( + self.get_organization, + default_timeout=None, + client_info=client_info, + ), + self.list_analytics_account_links: gapic_v1.method_async.wrap_method( + self.list_analytics_account_links, + default_timeout=None, + client_info=client_info, + ), + self.create_analytics_account_link: gapic_v1.method_async.wrap_method( + self.create_analytics_account_link, + default_timeout=None, + client_info=client_info, + ), + self.delete_analytics_account_link: gapic_v1.method_async.wrap_method( + self.delete_analytics_account_link, + default_timeout=None, + client_info=client_info, + ), + self.set_property_service_level: gapic_v1.method_async.wrap_method( + self.set_property_service_level, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("MarketingplatformAdminServiceGrpcAsyncIOTransport",) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/rest.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/rest.py new file mode 100644 index 000000000000..b3894ca05fc4 --- /dev/null +++ 
b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/rest.py @@ -0,0 +1,858 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.protobuf import empty_pb2 # type: ignore + +from google.ads.marketingplatform_admin_v1alpha.types import ( + marketingplatform_admin, + resources, +) + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import MarketingplatformAdminServiceTransport + +DEFAULT_CLIENT_INFO = 
gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class MarketingplatformAdminServiceRestInterceptor: + """Interceptor for MarketingplatformAdminService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the MarketingplatformAdminServiceRestTransport. + + .. code-block:: python + class MyCustomMarketingplatformAdminServiceInterceptor(MarketingplatformAdminServiceRestInterceptor): + def pre_create_analytics_account_link(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_analytics_account_link(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_analytics_account_link(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_organization(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_organization(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_analytics_account_links(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_analytics_account_links(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_property_service_level(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_property_service_level(self, response): + logging.log(f"Received response: {response}") + return response + + transport = 
MarketingplatformAdminServiceRestTransport(interceptor=MyCustomMarketingplatformAdminServiceInterceptor()) + client = MarketingplatformAdminServiceClient(transport=transport) + + + """ + + def pre_create_analytics_account_link( + self, + request: marketingplatform_admin.CreateAnalyticsAccountLinkRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + marketingplatform_admin.CreateAnalyticsAccountLinkRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for create_analytics_account_link + + Override in a subclass to manipulate the request or metadata + before they are sent to the MarketingplatformAdminService server. + """ + return request, metadata + + def post_create_analytics_account_link( + self, response: resources.AnalyticsAccountLink + ) -> resources.AnalyticsAccountLink: + """Post-rpc interceptor for create_analytics_account_link + + Override in a subclass to manipulate the response + after it is returned by the MarketingplatformAdminService server but before + it is returned to user code. + """ + return response + + def pre_delete_analytics_account_link( + self, + request: marketingplatform_admin.DeleteAnalyticsAccountLinkRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + marketingplatform_admin.DeleteAnalyticsAccountLinkRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for delete_analytics_account_link + + Override in a subclass to manipulate the request or metadata + before they are sent to the MarketingplatformAdminService server. + """ + return request, metadata + + def pre_get_organization( + self, + request: marketingplatform_admin.GetOrganizationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + marketingplatform_admin.GetOrganizationRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_organization + + Override in a subclass to manipulate the request or metadata + before they are sent to the MarketingplatformAdminService server. 
+ """ + return request, metadata + + def post_get_organization( + self, response: resources.Organization + ) -> resources.Organization: + """Post-rpc interceptor for get_organization + + Override in a subclass to manipulate the response + after it is returned by the MarketingplatformAdminService server but before + it is returned to user code. + """ + return response + + def pre_list_analytics_account_links( + self, + request: marketingplatform_admin.ListAnalyticsAccountLinksRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + marketingplatform_admin.ListAnalyticsAccountLinksRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for list_analytics_account_links + + Override in a subclass to manipulate the request or metadata + before they are sent to the MarketingplatformAdminService server. + """ + return request, metadata + + def post_list_analytics_account_links( + self, response: marketingplatform_admin.ListAnalyticsAccountLinksResponse + ) -> marketingplatform_admin.ListAnalyticsAccountLinksResponse: + """Post-rpc interceptor for list_analytics_account_links + + Override in a subclass to manipulate the response + after it is returned by the MarketingplatformAdminService server but before + it is returned to user code. + """ + return response + + def pre_set_property_service_level( + self, + request: marketingplatform_admin.SetPropertyServiceLevelRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + marketingplatform_admin.SetPropertyServiceLevelRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for set_property_service_level + + Override in a subclass to manipulate the request or metadata + before they are sent to the MarketingplatformAdminService server. 
+ """ + return request, metadata + + def post_set_property_service_level( + self, response: marketingplatform_admin.SetPropertyServiceLevelResponse + ) -> marketingplatform_admin.SetPropertyServiceLevelResponse: + """Post-rpc interceptor for set_property_service_level + + Override in a subclass to manipulate the response + after it is returned by the MarketingplatformAdminService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class MarketingplatformAdminServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: MarketingplatformAdminServiceRestInterceptor + + +class MarketingplatformAdminServiceRestTransport( + MarketingplatformAdminServiceTransport +): + """REST backend transport for MarketingplatformAdminService. + + Service Interface for the Google Marketing Platform Admin + API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "marketingplatformadmin.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[MarketingplatformAdminServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'marketingplatformadmin.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = ( + interceptor or MarketingplatformAdminServiceRestInterceptor() + ) + self._prep_wrapped_messages(client_info) + + class _CreateAnalyticsAccountLink(MarketingplatformAdminServiceRestStub): + def __hash__(self): + return hash("CreateAnalyticsAccountLink") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: marketingplatform_admin.CreateAnalyticsAccountLinkRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.AnalyticsAccountLink: + r"""Call the create analytics account + link method over HTTP. + + Args: + request (~.marketingplatform_admin.CreateAnalyticsAccountLinkRequest): + The request object. Request message for + CreateAnalyticsAccountLink RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.AnalyticsAccountLink: + A resource message representing the + link between a Google Analytics account + and a Google Marketing Platform + organization. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{parent=organizations/*}/analyticsAccountLinks", + "body": "analytics_account_link", + }, + ] + request, metadata = self._interceptor.pre_create_analytics_account_link( + request, metadata + ) + pb_request = marketingplatform_admin.CreateAnalyticsAccountLinkRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.AnalyticsAccountLink() + pb_resp = resources.AnalyticsAccountLink.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_analytics_account_link(resp) + return resp + + class _DeleteAnalyticsAccountLink(MarketingplatformAdminServiceRestStub): + def __hash__(self): + return hash("DeleteAnalyticsAccountLink") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: marketingplatform_admin.DeleteAnalyticsAccountLinkRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete analytics account + link method over HTTP. + + Args: + request (~.marketingplatform_admin.DeleteAnalyticsAccountLinkRequest): + The request object. Request message for + DeleteAnalyticsAccountLink RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha/{name=organizations/*/analyticsAccountLinks/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_analytics_account_link( + request, metadata + ) + pb_request = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetOrganization(MarketingplatformAdminServiceRestStub): + def __hash__(self): + return hash("GetOrganization") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: marketingplatform_admin.GetOrganizationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Organization: + r"""Call the get organization method over HTTP. 
+ + Args: + request (~.marketingplatform_admin.GetOrganizationRequest): + The request object. Request message for GetOrganization + RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.resources.Organization: + A resource message representing a + Google Marketing Platform organization. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=organizations/*}", + }, + ] + request, metadata = self._interceptor.pre_get_organization( + request, metadata + ) + pb_request = marketingplatform_admin.GetOrganizationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.Organization() + pb_resp = resources.Organization.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_organization(resp) + return resp + + class _ListAnalyticsAccountLinks(MarketingplatformAdminServiceRestStub): + def __hash__(self): + return hash("ListAnalyticsAccountLinks") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: marketingplatform_admin.ListAnalyticsAccountLinksRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> marketingplatform_admin.ListAnalyticsAccountLinksResponse: + r"""Call the list analytics account + links method over HTTP. + + Args: + request (~.marketingplatform_admin.ListAnalyticsAccountLinksRequest): + The request object. Request message for + ListAnalyticsAccountLinks RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.marketingplatform_admin.ListAnalyticsAccountLinksResponse: + Response message for + ListAnalyticsAccountLinks RPC. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{parent=organizations/*}/analyticsAccountLinks", + }, + ] + request, metadata = self._interceptor.pre_list_analytics_account_links( + request, metadata + ) + pb_request = marketingplatform_admin.ListAnalyticsAccountLinksRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = marketingplatform_admin.ListAnalyticsAccountLinksResponse() + pb_resp = marketingplatform_admin.ListAnalyticsAccountLinksResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_analytics_account_links(resp) + return resp + + class _SetPropertyServiceLevel(MarketingplatformAdminServiceRestStub): + def __hash__(self): + return hash("SetPropertyServiceLevel") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: marketingplatform_admin.SetPropertyServiceLevelRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> marketingplatform_admin.SetPropertyServiceLevelResponse: + r"""Call the set property service + level method over HTTP. + + Args: + request (~.marketingplatform_admin.SetPropertyServiceLevelRequest): + The request object. Request message for + SetPropertyServiceLevel RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.marketingplatform_admin.SetPropertyServiceLevelResponse: + Response message for + SetPropertyServiceLevel RPC. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{analytics_account_link=organizations/*/analyticsAccountLinks/*}:setPropertyServiceLevel", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_set_property_service_level( + request, metadata + ) + pb_request = marketingplatform_admin.SetPropertyServiceLevelRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = marketingplatform_admin.SetPropertyServiceLevelResponse() + pb_resp = marketingplatform_admin.SetPropertyServiceLevelResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_property_service_level(resp) + return resp + + @property + def create_analytics_account_link( + self, + ) -> Callable[ + [marketingplatform_admin.CreateAnalyticsAccountLinkRequest], + resources.AnalyticsAccountLink, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateAnalyticsAccountLink(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_analytics_account_link( + self, + ) -> Callable[ + [marketingplatform_admin.DeleteAnalyticsAccountLinkRequest], empty_pb2.Empty + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteAnalyticsAccountLink(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_organization( + self, + ) -> Callable[ + [marketingplatform_admin.GetOrganizationRequest], resources.Organization + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetOrganization(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_analytics_account_links( + self, + ) -> Callable[ + [marketingplatform_admin.ListAnalyticsAccountLinksRequest], + marketingplatform_admin.ListAnalyticsAccountLinksResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListAnalyticsAccountLinks(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_property_service_level( + self, + ) -> Callable[ + [marketingplatform_admin.SetPropertyServiceLevelRequest], + marketingplatform_admin.SetPropertyServiceLevelResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetPropertyServiceLevel(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("MarketingplatformAdminServiceRestTransport",) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/__init__.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/__init__.py new file mode 100644 index 000000000000..617c3bec15b6 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/__init__.py @@ -0,0 +1,40 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .marketingplatform_admin import ( + AnalyticsServiceLevel, + CreateAnalyticsAccountLinkRequest, + DeleteAnalyticsAccountLinkRequest, + GetOrganizationRequest, + ListAnalyticsAccountLinksRequest, + ListAnalyticsAccountLinksResponse, + SetPropertyServiceLevelRequest, + SetPropertyServiceLevelResponse, +) +from .resources import AnalyticsAccountLink, LinkVerificationState, Organization + +__all__ = ( + "CreateAnalyticsAccountLinkRequest", + "DeleteAnalyticsAccountLinkRequest", + "GetOrganizationRequest", + "ListAnalyticsAccountLinksRequest", + "ListAnalyticsAccountLinksResponse", + "SetPropertyServiceLevelRequest", + "SetPropertyServiceLevelResponse", + "AnalyticsServiceLevel", + "AnalyticsAccountLink", + "Organization", + "LinkVerificationState", +) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/marketingplatform_admin.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/marketingplatform_admin.py new file mode 100644 index 000000000000..a446e0c57b69 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/marketingplatform_admin.py @@ -0,0 +1,217 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.ads.marketingplatform_admin_v1alpha.types import resources + +__protobuf__ = proto.module( + package="google.marketingplatform.admin.v1alpha", + manifest={ + "AnalyticsServiceLevel", + "GetOrganizationRequest", + "ListAnalyticsAccountLinksRequest", + "ListAnalyticsAccountLinksResponse", + "CreateAnalyticsAccountLinkRequest", + "DeleteAnalyticsAccountLinkRequest", + "SetPropertyServiceLevelRequest", + "SetPropertyServiceLevelResponse", + }, +) + + +class AnalyticsServiceLevel(proto.Enum): + r"""Various levels of service for Google Analytics. + + Values: + ANALYTICS_SERVICE_LEVEL_UNSPECIFIED (0): + Service level unspecified. + ANALYTICS_SERVICE_LEVEL_STANDARD (1): + The standard version of Google Analytics. + ANALYTICS_SERVICE_LEVEL_360 (2): + The premium version of Google Analytics. + """ + ANALYTICS_SERVICE_LEVEL_UNSPECIFIED = 0 + ANALYTICS_SERVICE_LEVEL_STANDARD = 1 + ANALYTICS_SERVICE_LEVEL_360 = 2 + + +class GetOrganizationRequest(proto.Message): + r"""Request message for GetOrganization RPC. + + Attributes: + name (str): + Required. The name of the Organization to retrieve. Format: + organizations/{org_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListAnalyticsAccountLinksRequest(proto.Message): + r"""Request message for ListAnalyticsAccountLinks RPC. + + Attributes: + parent (str): + Required. The parent organization, which owns this + collection of Analytics account links. Format: + organizations/{org_id} + page_size (int): + Optional. The maximum number of Analytics + account links to return in one call. The service + may return fewer than this value. + + If unspecified, at most 50 Analytics account + links will be returned. The maximum value is + 1000; values above 1000 will be coerced to 1000. + page_token (str): + Optional. 
A page token, received from a previous + ListAnalyticsAccountLinks call. Provide this to retrieve the + subsequent page. + + When paginating, all other parameters provided to + ``ListAnalyticsAccountLinks`` must match the call that + provided the page token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListAnalyticsAccountLinksResponse(proto.Message): + r"""Response message for ListAnalyticsAccountLinks RPC. + + Attributes: + analytics_account_links (MutableSequence[google.ads.marketingplatform_admin_v1alpha.types.AnalyticsAccountLink]): + Analytics account links in this organization. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + analytics_account_links: MutableSequence[ + resources.AnalyticsAccountLink + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.AnalyticsAccountLink, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateAnalyticsAccountLinkRequest(proto.Message): + r"""Request message for CreateAnalyticsAccountLink RPC. + + Attributes: + parent (str): + Required. The parent resource where this Analytics account + link will be created. Format: organizations/{org_id} + analytics_account_link (google.ads.marketingplatform_admin_v1alpha.types.AnalyticsAccountLink): + Required. The Analytics account link to + create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + analytics_account_link: resources.AnalyticsAccountLink = proto.Field( + proto.MESSAGE, + number=2, + message=resources.AnalyticsAccountLink, + ) + + +class DeleteAnalyticsAccountLinkRequest(proto.Message): + r"""Request message for DeleteAnalyticsAccountLink RPC. 
+ + Attributes: + name (str): + Required. The name of the Analytics account link to delete. + Format: + organizations/{org_id}/analyticsAccountLinks/{analytics_account_link_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class SetPropertyServiceLevelRequest(proto.Message): + r"""Request message for SetPropertyServiceLevel RPC. + + Attributes: + analytics_account_link (str): + Required. The parent AnalyticsAccountLink scope where this + property is in. Format: + organizations/{org_id}/analyticsAccountLinks/{analytics_account_link_id} + analytics_property (str): + Required. The Analytics property to change the ServiceLevel + setting. This field is the name of the Google Analytics + Admin API property resource. + + Format: + analyticsadmin.googleapis.com/properties/{property_id} + service_level (google.ads.marketingplatform_admin_v1alpha.types.AnalyticsServiceLevel): + Required. The service level to set for this + property. + """ + + analytics_account_link: str = proto.Field( + proto.STRING, + number=1, + ) + analytics_property: str = proto.Field( + proto.STRING, + number=2, + ) + service_level: "AnalyticsServiceLevel" = proto.Field( + proto.ENUM, + number=3, + enum="AnalyticsServiceLevel", + ) + + +class SetPropertyServiceLevelResponse(proto.Message): + r"""Response message for SetPropertyServiceLevel RPC.""" + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/resources.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/resources.py new file mode 100644 index 000000000000..420e17747cf7 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/resources.py @@ -0,0 +1,120 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in 
compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.marketingplatform.admin.v1alpha", + manifest={ + "LinkVerificationState", + "Organization", + "AnalyticsAccountLink", + }, +) + + +class LinkVerificationState(proto.Enum): + r"""The verification state of the link between a product account + and a GMP organization. + + Values: + LINK_VERIFICATION_STATE_UNSPECIFIED (0): + The link state is unknown. + LINK_VERIFICATION_STATE_VERIFIED (1): + The link is established. + LINK_VERIFICATION_STATE_NOT_VERIFIED (2): + The link is requested, but hasn't been + approved by the product account admin. + """ + LINK_VERIFICATION_STATE_UNSPECIFIED = 0 + LINK_VERIFICATION_STATE_VERIFIED = 1 + LINK_VERIFICATION_STATE_NOT_VERIFIED = 2 + + +class Organization(proto.Message): + r"""A resource message representing a Google Marketing Platform + organization. + + Attributes: + name (str): + Identifier. The resource name of the GMP organization. + Format: organizations/{org_id} + display_name (str): + The human-readable name for the organization. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + + +class AnalyticsAccountLink(proto.Message): + r"""A resource message representing the link between a Google + Analytics account and a Google Marketing Platform organization. + + Attributes: + name (str): + Identifier. 
Resource name of this AnalyticsAccountLink. Note + the resource ID is the same as the ID of the Analytics + account. + + Format: + organizations/{org_id}/analyticsAccountLinks/{analytics_account_link_id} + Example: "organizations/xyz/analyticsAccountLinks/1234". + analytics_account (str): + Required. Immutable. The resource name of the AnalyticsAdmin + API account. The account ID will be used as the ID of this + AnalyticsAccountLink resource, which will become the final + component of the resource name. + + Format: analyticsadmin.googleapis.com/accounts/{account_id} + display_name (str): + Output only. The human-readable name for the + Analytics account. + link_verification_state (google.ads.marketingplatform_admin_v1alpha.types.LinkVerificationState): + Output only. The verification state of the + link between the Analytics account and the + parent organization. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + analytics_account: str = proto.Field( + proto.STRING, + number=2, + ) + display_name: str = proto.Field( + proto.STRING, + number=3, + ) + link_verification_state: "LinkVerificationState" = proto.Field( + proto.ENUM, + number=4, + enum="LinkVerificationState", + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-marketingplatform-admin/mypy.ini b/packages/google-ads-marketingplatform-admin/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/packages/google-ads-marketingplatform-admin/noxfile.py b/packages/google-ads-marketingplatform-admin/noxfile.py new file mode 100644 index 000000000000..67b7265f7586 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/noxfile.py @@ -0,0 +1,452 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not 
use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! + +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +from typing import Dict, List +import warnings + +import nox + +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" + +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + + +DEFAULT_PYTHON_VERSION = "3.10" + +UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing 
+nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. 
Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): + # Install all test dependencies, then install this package in-place. + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # TODO(https://github.com/googleapis/synthtool/issues/1976): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +def install_systemtest_dependencies(session, *constraints): + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. 
+ # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. 
+ if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.12") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): + """Run all tests with prerelease versions of dependencies installed.""" + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. 
+ with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "grpc-google-iam-v1", + "googleapis-common-protos", + "grpcio", + "grpcio-status", + "google-api-core", + "google-auth", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_deployment_async.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_async.py similarity index 61% rename from packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_deployment_async.py rename to packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_async.py index 764524b6c030..bfd28a483b92 
100644 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_deployment_async.py +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_async.py @@ -15,15 +15,15 @@ # # Generated code. DO NOT EDIT! # -# Snippet for CreateDeployment +# Snippet for CreateAnalyticsAccountLink # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub +# python3 -m pip install google-ads-marketingplatform-admin -# [START apihub_v1_generated_ApiHub_CreateDeployment_async] +# [START marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_CreateAnalyticsAccountLink_async] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -31,30 +31,26 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 +from google.ads import marketingplatform_admin_v1alpha -async def sample_create_deployment(): +async def sample_create_analytics_account_link(): # Create a client - client = apihub_v1.ApiHubAsyncClient() + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient() # Initialize request argument(s) - deployment = apihub_v1.Deployment() - deployment.display_name = "display_name_value" - deployment.deployment_type.enum_values.values.id = "id_value" - deployment.deployment_type.enum_values.values.display_name = "display_name_value" - deployment.resource_uri = "resource_uri_value" - deployment.endpoints = ['endpoints_value1', 'endpoints_value2'] - - request = apihub_v1.CreateDeploymentRequest( + analytics_account_link = marketingplatform_admin_v1alpha.AnalyticsAccountLink() + analytics_account_link.analytics_account = "analytics_account_value" + + request = marketingplatform_admin_v1alpha.CreateAnalyticsAccountLinkRequest( parent="parent_value", - deployment=deployment, + analytics_account_link=analytics_account_link, ) # Make the request - response = await client.create_deployment(request=request) + response = await client.create_analytics_account_link(request=request) # Handle the response print(response) -# [END apihub_v1_generated_ApiHub_CreateDeployment_async] +# [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_CreateAnalyticsAccountLink_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_create_runtime_project_attachment_async.py 
b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_sync.py similarity index 62% rename from packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_create_runtime_project_attachment_async.py rename to packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_sync.py index 41874f8f4024..6af1b08a5a3b 100644 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_create_runtime_project_attachment_async.py +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_sync.py @@ -15,15 +15,15 @@ # # Generated code. DO NOT EDIT! # -# Snippet for CreateRuntimeProjectAttachment +# Snippet for CreateAnalyticsAccountLink # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub +# python3 -m pip install google-ads-marketingplatform-admin -# [START apihub_v1_generated_RuntimeProjectAttachmentService_CreateRuntimeProjectAttachment_async] +# [START marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_CreateAnalyticsAccountLink_sync] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -31,27 +31,26 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 +from google.ads import marketingplatform_admin_v1alpha -async def sample_create_runtime_project_attachment(): +def sample_create_analytics_account_link(): # Create a client - client = apihub_v1.RuntimeProjectAttachmentServiceAsyncClient() + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient() # Initialize request argument(s) - runtime_project_attachment = apihub_v1.RuntimeProjectAttachment() - runtime_project_attachment.runtime_project = "runtime_project_value" + analytics_account_link = marketingplatform_admin_v1alpha.AnalyticsAccountLink() + analytics_account_link.analytics_account = "analytics_account_value" - request = apihub_v1.CreateRuntimeProjectAttachmentRequest( + request = marketingplatform_admin_v1alpha.CreateAnalyticsAccountLinkRequest( parent="parent_value", - runtime_project_attachment_id="runtime_project_attachment_id_value", - runtime_project_attachment=runtime_project_attachment, + analytics_account_link=analytics_account_link, ) # Make the request - response = await client.create_runtime_project_attachment(request=request) + response = client.create_analytics_account_link(request=request) # Handle the response print(response) -# [END apihub_v1_generated_RuntimeProjectAttachmentService_CreateRuntimeProjectAttachment_async] +# [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_CreateAnalyticsAccountLink_sync] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_delete_analytics_account_link_async.py 
b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_delete_analytics_account_link_async.py new file mode 100644 index 000000000000..c0b2c7e1ffa9 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_delete_analytics_account_link_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteAnalyticsAccountLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-marketingplatform-admin + + +# [START marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_DeleteAnalyticsAccountLink_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import marketingplatform_admin_v1alpha + + +async def sample_delete_analytics_account_link(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.DeleteAnalyticsAccountLinkRequest( + name="name_value", + ) + + # Make the request + await client.delete_analytics_account_link(request=request) + + +# [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_DeleteAnalyticsAccountLink_async] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_delete_analytics_account_link_sync.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_delete_analytics_account_link_sync.py new file mode 100644 index 000000000000..8f1a794eacac --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_delete_analytics_account_link_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteAnalyticsAccountLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-marketingplatform-admin + + +# [START marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_DeleteAnalyticsAccountLink_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import marketingplatform_admin_v1alpha + + +def sample_delete_analytics_account_link(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.DeleteAnalyticsAccountLinkRequest( + name="name_value", + ) + + # Make the request + client.delete_analytics_account_link(request=request) + + +# [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_DeleteAnalyticsAccountLink_sync] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_async.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_async.py new file mode 100644 index 000000000000..7666fa53e916 --- /dev/null +++ 
b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetOrganization +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-marketingplatform-admin + + +# [START marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_GetOrganization_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import marketingplatform_admin_v1alpha + + +async def sample_get_organization(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.GetOrganizationRequest( + name="name_value", + ) + + # Make the request + response = await client.get_organization(request=request) + + # Handle the response + print(response) + +# [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_GetOrganization_async] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_sync.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_sync.py new file mode 100644 index 000000000000..52b506c61914 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GetOrganization +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-marketingplatform-admin + + +# [START marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_GetOrganization_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import marketingplatform_admin_v1alpha + + +def sample_get_organization(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.GetOrganizationRequest( + name="name_value", + ) + + # Make the request + response = client.get_organization(request=request) + + # Handle the response + print(response) + +# [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_GetOrganization_sync] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_async.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_async.py new file mode 100644 index 000000000000..3837010ff87f --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_async.py 
@@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAnalyticsAccountLinks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-marketingplatform-admin + + +# [START marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_ListAnalyticsAccountLinks_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import marketingplatform_admin_v1alpha + + +async def sample_list_analytics_account_links(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.ListAnalyticsAccountLinksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_analytics_account_links(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_ListAnalyticsAccountLinks_async] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_sync.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_sync.py new file mode 100644 index 000000000000..af3ed458056a --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAnalyticsAccountLinks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-marketingplatform-admin + + +# [START marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_ListAnalyticsAccountLinks_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import marketingplatform_admin_v1alpha + + +def sample_list_analytics_account_links(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.ListAnalyticsAccountLinksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_analytics_account_links(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_ListAnalyticsAccountLinks_sync] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_async.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_async.py new file mode 
100644 index 000000000000..b07e73cde9f5 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetPropertyServiceLevel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-marketingplatform-admin + + +# [START marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_SetPropertyServiceLevel_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import marketingplatform_admin_v1alpha + + +async def sample_set_property_service_level(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.SetPropertyServiceLevelRequest( + analytics_account_link="analytics_account_link_value", + analytics_property="analytics_property_value", + service_level="ANALYTICS_SERVICE_LEVEL_360", + ) + + # Make the request + response = await client.set_property_service_level(request=request) + + # Handle the response + print(response) + +# [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_SetPropertyServiceLevel_async] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_sync.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_sync.py new file mode 100644 index 000000000000..a742b4f50f64 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetPropertyServiceLevel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-marketingplatform-admin + + +# [START marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_SetPropertyServiceLevel_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import marketingplatform_admin_v1alpha + + +def sample_set_property_service_level(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.SetPropertyServiceLevelRequest( + analytics_account_link="analytics_account_link_value", + analytics_property="analytics_property_value", + service_level="ANALYTICS_SERVICE_LEVEL_360", + ) + + # Make the request + response = client.set_property_service_level(request=request) + + # Handle the response + print(response) + +# [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_SetPropertyServiceLevel_sync] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/snippet_metadata_google.marketingplatform.admin.v1alpha.json b/packages/google-ads-marketingplatform-admin/samples/generated_samples/snippet_metadata_google.marketingplatform.admin.v1alpha.json new file mode 100644 index 000000000000..72abc6186c7b --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/snippet_metadata_google.marketingplatform.admin.v1alpha.json @@ -0,0 +1,822 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.marketingplatform.admin.v1alpha", + "version": "v1alpha" + } + ], + "language": "PYTHON", + "name": "google-ads-marketingplatform-admin", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient", + "shortName": "MarketingplatformAdminServiceAsyncClient" + }, + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient.create_analytics_account_link", + 
"method": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService.CreateAnalyticsAccountLink", + "service": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "shortName": "MarketingplatformAdminService" + }, + "shortName": "CreateAnalyticsAccountLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.marketingplatform_admin_v1alpha.types.CreateAnalyticsAccountLinkRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "analytics_account_link", + "type": "google.ads.marketingplatform_admin_v1alpha.types.AnalyticsAccountLink" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ads.marketingplatform_admin_v1alpha.types.AnalyticsAccountLink", + "shortName": "create_analytics_account_link" + }, + "description": "Sample for CreateAnalyticsAccountLink", + "file": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_CreateAnalyticsAccountLink_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient", + "shortName": "MarketingplatformAdminServiceClient" + }, + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient.create_analytics_account_link", + "method": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService.CreateAnalyticsAccountLink", + "service": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "shortName": "MarketingplatformAdminService" + }, + "shortName": "CreateAnalyticsAccountLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.marketingplatform_admin_v1alpha.types.CreateAnalyticsAccountLinkRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "analytics_account_link", + "type": "google.ads.marketingplatform_admin_v1alpha.types.AnalyticsAccountLink" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ads.marketingplatform_admin_v1alpha.types.AnalyticsAccountLink", + "shortName": "create_analytics_account_link" + }, + "description": "Sample for CreateAnalyticsAccountLink", + "file": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_CreateAnalyticsAccountLink_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + 
], + "title": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient", + "shortName": "MarketingplatformAdminServiceAsyncClient" + }, + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient.delete_analytics_account_link", + "method": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService.DeleteAnalyticsAccountLink", + "service": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "shortName": "MarketingplatformAdminService" + }, + "shortName": "DeleteAnalyticsAccountLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.marketingplatform_admin_v1alpha.types.DeleteAnalyticsAccountLinkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_analytics_account_link" + }, + "description": "Sample for DeleteAnalyticsAccountLink", + "file": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_delete_analytics_account_link_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_DeleteAnalyticsAccountLink_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + 
"title": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_delete_analytics_account_link_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient", + "shortName": "MarketingplatformAdminServiceClient" + }, + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient.delete_analytics_account_link", + "method": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService.DeleteAnalyticsAccountLink", + "service": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "shortName": "MarketingplatformAdminService" + }, + "shortName": "DeleteAnalyticsAccountLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.marketingplatform_admin_v1alpha.types.DeleteAnalyticsAccountLinkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_analytics_account_link" + }, + "description": "Sample for DeleteAnalyticsAccountLink", + "file": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_delete_analytics_account_link_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_DeleteAnalyticsAccountLink_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_delete_analytics_account_link_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient", + "shortName": "MarketingplatformAdminServiceAsyncClient" + }, + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient.get_organization", + "method": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService.GetOrganization", + "service": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "shortName": "MarketingplatformAdminService" + }, + "shortName": "GetOrganization" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.marketingplatform_admin_v1alpha.types.GetOrganizationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ads.marketingplatform_admin_v1alpha.types.Organization", + "shortName": "get_organization" + }, + "description": "Sample for GetOrganization", + "file": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_GetOrganization_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + 
"title": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient", + "shortName": "MarketingplatformAdminServiceClient" + }, + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient.get_organization", + "method": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService.GetOrganization", + "service": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "shortName": "MarketingplatformAdminService" + }, + "shortName": "GetOrganization" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.marketingplatform_admin_v1alpha.types.GetOrganizationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ads.marketingplatform_admin_v1alpha.types.Organization", + "shortName": "get_organization" + }, + "description": "Sample for GetOrganization", + "file": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_GetOrganization_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient", + "shortName": "MarketingplatformAdminServiceAsyncClient" + }, + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient.list_analytics_account_links", + "method": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService.ListAnalyticsAccountLinks", + "service": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "shortName": "MarketingplatformAdminService" + }, + "shortName": "ListAnalyticsAccountLinks" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.pagers.ListAnalyticsAccountLinksAsyncPager", + "shortName": "list_analytics_account_links" + }, + "description": "Sample for ListAnalyticsAccountLinks", + "file": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_ListAnalyticsAccountLinks_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + 
{ + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient", + "shortName": "MarketingplatformAdminServiceClient" + }, + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient.list_analytics_account_links", + "method": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService.ListAnalyticsAccountLinks", + "service": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "shortName": "MarketingplatformAdminService" + }, + "shortName": "ListAnalyticsAccountLinks" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.marketingplatform_admin_v1alpha.types.ListAnalyticsAccountLinksRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.pagers.ListAnalyticsAccountLinksPager", + "shortName": "list_analytics_account_links" + }, + "description": "Sample for ListAnalyticsAccountLinks", + "file": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_ListAnalyticsAccountLinks_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + 
"start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient", + "shortName": "MarketingplatformAdminServiceAsyncClient" + }, + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient.set_property_service_level", + "method": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService.SetPropertyServiceLevel", + "service": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "shortName": "MarketingplatformAdminService" + }, + "shortName": "SetPropertyServiceLevel" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.marketingplatform_admin_v1alpha.types.SetPropertyServiceLevelRequest" + }, + { + "name": "analytics_account_link", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ads.marketingplatform_admin_v1alpha.types.SetPropertyServiceLevelResponse", + "shortName": "set_property_service_level" + }, + "description": "Sample for SetPropertyServiceLevel", + "file": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_SetPropertyServiceLevel_async", + "segments": [ + { + "end": 
53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient", + "shortName": "MarketingplatformAdminServiceClient" + }, + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient.set_property_service_level", + "method": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService.SetPropertyServiceLevel", + "service": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "shortName": "MarketingplatformAdminService" + }, + "shortName": "SetPropertyServiceLevel" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.marketingplatform_admin_v1alpha.types.SetPropertyServiceLevelRequest" + }, + { + "name": "analytics_account_link", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ads.marketingplatform_admin_v1alpha.types.SetPropertyServiceLevelResponse", + "shortName": "set_property_service_level" + }, + "description": "Sample for SetPropertyServiceLevel", + "file": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_SetPropertyServiceLevel_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_sync.py" + } + ] +} diff --git a/packages/google-ads-marketingplatform-admin/scripts/decrypt-secrets.sh b/packages/google-ads-marketingplatform-admin/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..0018b421ddf8 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2023 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. 
+if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. +PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-ads-marketingplatform-admin/scripts/fixup_marketingplatform_admin_v1alpha_keywords.py b/packages/google-ads-marketingplatform-admin/scripts/fixup_marketingplatform_admin_v1alpha_keywords.py new file mode 100644 index 000000000000..eb6dc67078de --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/scripts/fixup_marketingplatform_admin_v1alpha_keywords.py @@ -0,0 +1,180 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class marketingplatform_adminCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_analytics_account_link': ('parent', 'analytics_account_link', ), + 'delete_analytics_account_link': ('name', ), + 'get_organization': ('name', ), + 'list_analytics_account_links': ('parent', 'page_size', 'page_token', ), + 'set_property_service_level': ('analytics_account_link', 'analytics_property', 'service_level', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. 
+ return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=marketingplatform_adminCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the marketingplatform_admin client library. 
+ +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-ads-marketingplatform-admin/setup.py b/packages/google-ads-marketingplatform-admin/setup.py new file mode 100644 index 000000000000..bd6f637c0bf8 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/setup.py @@ -0,0 +1,95 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os +import re + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-ads-marketingplatform-admin" + + +description = "Google Ads Marketingplatform Admin API client library" + +version = None + +with open( + os.path.join(package_root, "google/ads/marketingplatform_admin/gapic_version.py") +) as fp: + version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) + assert len(version_candidates) == 1 + version = version_candidates[0] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", + "proto-plus >= 1.22.3, <2.0.0dev", + "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ads-marketingplatform-admin" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.find_namespace_packages() + if package.startswith("google") 
+] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-ads-marketingplatform-admin/testing/.gitignore b/packages/google-ads-marketingplatform-admin/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-ads-marketingplatform-admin/testing/constraints-3.10.txt b/packages/google-ads-marketingplatform-admin/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/packages/google-ads-marketingplatform-admin/testing/constraints-3.11.txt b/packages/google-ads-marketingplatform-admin/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-ads-marketingplatform-admin/testing/constraints-3.12.txt b/packages/google-ads-marketingplatform-admin/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-ads-marketingplatform-admin/testing/constraints-3.7.txt b/packages/google-ads-marketingplatform-admin/testing/constraints-3.7.txt new file mode 100644 index 000000000000..fc812592b0ee --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/testing/constraints-3.7.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. 
+# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.1 +google-auth==2.14.1 +proto-plus==1.22.3 +protobuf==3.20.2 diff --git a/packages/google-ads-marketingplatform-admin/testing/constraints-3.8.txt b/packages/google-ads-marketingplatform-admin/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-ads-marketingplatform-admin/testing/constraints-3.9.txt b/packages/google-ads-marketingplatform-admin/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-ads-marketingplatform-admin/tests/__init__.py b/packages/google-ads-marketingplatform-admin/tests/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-ads-marketingplatform-admin/tests/unit/__init__.py b/packages/google-ads-marketingplatform-admin/tests/unit/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-ads-marketingplatform-admin/tests/unit/gapic/__init__.py b/packages/google-ads-marketingplatform-admin/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-ads-marketingplatform-admin/tests/unit/gapic/marketingplatform_admin_v1alpha/__init__.py b/packages/google-ads-marketingplatform-admin/tests/unit/gapic/marketingplatform_admin_v1alpha/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/tests/unit/gapic/marketingplatform_admin_v1alpha/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-ads-marketingplatform-admin/tests/unit/gapic/marketingplatform_admin_v1alpha/test_marketingplatform_admin_service.py b/packages/google-ads-marketingplatform-admin/tests/unit/gapic/marketingplatform_admin_v1alpha/test_marketingplatform_admin_service.py new file mode 100644 index 000000000000..c0d88b7ea387 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/tests/unit/gapic/marketingplatform_admin_v1alpha/test_marketingplatform_admin_service.py @@ -0,0 +1,5969 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service import ( + MarketingplatformAdminServiceAsyncClient, + MarketingplatformAdminServiceClient, + pagers, + transports, +) +from google.ads.marketingplatform_admin_v1alpha.types import ( + marketingplatform_admin, + resources, +) + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. 
+# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert MarketingplatformAdminServiceClient._get_default_mtls_endpoint(None) is None + assert ( + MarketingplatformAdminServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + MarketingplatformAdminServiceClient._get_default_mtls_endpoint( + api_mtls_endpoint + ) + == api_mtls_endpoint + ) + assert ( + MarketingplatformAdminServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + MarketingplatformAdminServiceClient._get_default_mtls_endpoint( + sandbox_mtls_endpoint + ) + == sandbox_mtls_endpoint + ) + assert ( + MarketingplatformAdminServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert MarketingplatformAdminServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert 
MarketingplatformAdminServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert MarketingplatformAdminServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + MarketingplatformAdminServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert MarketingplatformAdminServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert MarketingplatformAdminServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert MarketingplatformAdminServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + MarketingplatformAdminServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert MarketingplatformAdminServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert ( + MarketingplatformAdminServiceClient._get_client_cert_source(None, False) is None + ) + assert ( + 
MarketingplatformAdminServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + MarketingplatformAdminServiceClient._get_client_cert_source( + mock_provided_cert_source, True + ) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + MarketingplatformAdminServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + MarketingplatformAdminServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + MarketingplatformAdminServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MarketingplatformAdminServiceClient), +) +@mock.patch.object( + MarketingplatformAdminServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MarketingplatformAdminServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = MarketingplatformAdminServiceClient._DEFAULT_UNIVERSE + default_endpoint = ( + MarketingplatformAdminServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + ) + mock_universe = "bar.com" + mock_endpoint = ( + MarketingplatformAdminServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + ) + + assert ( + MarketingplatformAdminServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + MarketingplatformAdminServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == MarketingplatformAdminServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + 
MarketingplatformAdminServiceClient._get_api_endpoint( + None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + MarketingplatformAdminServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == MarketingplatformAdminServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + MarketingplatformAdminServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == MarketingplatformAdminServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + MarketingplatformAdminServiceClient._get_api_endpoint( + None, None, mock_universe, "never" + ) + == mock_endpoint + ) + assert ( + MarketingplatformAdminServiceClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + MarketingplatformAdminServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + MarketingplatformAdminServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + MarketingplatformAdminServiceClient._get_universe_domain( + None, universe_domain_env + ) + == universe_domain_env + ) + assert ( + MarketingplatformAdminServiceClient._get_universe_domain(None, None) + == MarketingplatformAdminServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + MarketingplatformAdminServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+ + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceGrpcTransport, + "grpc", + ), + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceRestTransport, + "rest", + ), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. 
+ google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (MarketingplatformAdminServiceClient, "grpc"), + (MarketingplatformAdminServiceAsyncClient, "grpc_asyncio"), + (MarketingplatformAdminServiceClient, "rest"), + ], +) +def test_marketingplatform_admin_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "marketingplatformadmin.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://marketingplatformadmin.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.MarketingplatformAdminServiceGrpcTransport, "grpc"), + (transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.MarketingplatformAdminServiceRestTransport, "rest"), + ], +) +def test_marketingplatform_admin_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = 
transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (MarketingplatformAdminServiceClient, "grpc"), + (MarketingplatformAdminServiceAsyncClient, "grpc_asyncio"), + (MarketingplatformAdminServiceClient, "rest"), + ], +) +def test_marketingplatform_admin_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "marketingplatformadmin.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://marketingplatformadmin.googleapis.com" + ) + + +def test_marketingplatform_admin_service_client_get_transport_class(): + transport = MarketingplatformAdminServiceClient.get_transport_class() + available_transports = [ + transports.MarketingplatformAdminServiceGrpcTransport, + transports.MarketingplatformAdminServiceRestTransport, + ] + assert transport in available_transports + + transport = MarketingplatformAdminServiceClient.get_transport_class("grpc") + assert transport == transports.MarketingplatformAdminServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceGrpcTransport, + "grpc", + ), + ( + MarketingplatformAdminServiceAsyncClient, + 
transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + MarketingplatformAdminServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MarketingplatformAdminServiceClient), +) +@mock.patch.object( + MarketingplatformAdminServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MarketingplatformAdminServiceAsyncClient), +) +def test_marketingplatform_admin_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object( + MarketingplatformAdminServiceClient, "get_transport_class" + ) as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object( + MarketingplatformAdminServiceClient, "get_transport_class" + ) as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceGrpcTransport, + "grpc", + "true", + ), + ( + MarketingplatformAdminServiceAsyncClient, + 
transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceGrpcTransport, + "grpc", + "false", + ), + ( + MarketingplatformAdminServiceAsyncClient, + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceRestTransport, + "rest", + "true", + ), + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + MarketingplatformAdminServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MarketingplatformAdminServiceClient), +) +@mock.patch.object( + MarketingplatformAdminServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MarketingplatformAdminServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_marketingplatform_admin_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", + [MarketingplatformAdminServiceClient, MarketingplatformAdminServiceAsyncClient], +) +@mock.patch.object( + MarketingplatformAdminServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MarketingplatformAdminServiceClient), +) +@mock.patch.object( + MarketingplatformAdminServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MarketingplatformAdminServiceAsyncClient), +) +def test_marketingplatform_admin_service_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", + [MarketingplatformAdminServiceClient, MarketingplatformAdminServiceAsyncClient], +) +@mock.patch.object( + MarketingplatformAdminServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MarketingplatformAdminServiceClient), +) +@mock.patch.object( + MarketingplatformAdminServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MarketingplatformAdminServiceAsyncClient), +) +def test_marketingplatform_admin_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = MarketingplatformAdminServiceClient._DEFAULT_UNIVERSE + default_endpoint = ( + MarketingplatformAdminServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + ) + mock_universe = "bar.com" + mock_endpoint = ( + MarketingplatformAdminServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceGrpcTransport, + "grpc", + ), + ( + MarketingplatformAdminServiceAsyncClient, + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceRestTransport, + "rest", + ), + ], +) +def test_marketingplatform_admin_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + MarketingplatformAdminServiceAsyncClient, + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceRestTransport, + "rest", + None, + ), + ], +) +def test_marketingplatform_admin_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_marketingplatform_admin_service_client_client_options_from_dict(): + with mock.patch( + "google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.transports.MarketingplatformAdminServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = MarketingplatformAdminServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + MarketingplatformAdminServiceAsyncClient, + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_marketingplatform_admin_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "marketingplatformadmin.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.read", + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.update", + ), + scopes=None, + default_host="marketingplatformadmin.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.GetOrganizationRequest, + dict, + ], +) +def test_get_organization(request_type, transport: str = "grpc"): + client = MarketingplatformAdminServiceClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_organization), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Organization( + name="name_value", + display_name="display_name_value", + ) + response = client.get_organization(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = marketingplatform_admin.GetOrganizationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Organization) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_get_organization_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_organization), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_organization() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.GetOrganizationRequest() + + +def test_get_organization_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = marketingplatform_admin.GetOrganizationRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_organization), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_organization(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.GetOrganizationRequest( + name="name_value", + ) + + +def test_get_organization_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_organization in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_organization + ] = mock_rpc + request = {} + client.get_organization(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_organization(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_organization_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_organization), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Organization( + name="name_value", + display_name="display_name_value", + ) + ) + response = await client.get_organization() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.GetOrganizationRequest() + + +@pytest.mark.asyncio +async def test_get_organization_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_organization + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_organization + ] = mock_rpc + + request = {} + await client.get_organization(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.get_organization(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_organization_async( + transport: str = "grpc_asyncio", + request_type=marketingplatform_admin.GetOrganizationRequest, +): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_organization), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Organization( + name="name_value", + display_name="display_name_value", + ) + ) + response = await client.get_organization(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = marketingplatform_admin.GetOrganizationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Organization) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +@pytest.mark.asyncio +async def test_get_organization_async_from_dict(): + await test_get_organization_async(request_type=dict) + + +def test_get_organization_field_headers(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = marketingplatform_admin.GetOrganizationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_organization), "__call__") as call: + call.return_value = resources.Organization() + client.get_organization(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_organization_field_headers_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = marketingplatform_admin.GetOrganizationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_organization), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Organization() + ) + await client.get_organization(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_organization_flattened(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_organization), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Organization() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_organization( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_organization_flattened_error(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_organization( + marketingplatform_admin.GetOrganizationRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_organization_flattened_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_organization), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Organization() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Organization() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_organization( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_organization_flattened_error_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_organization( + marketingplatform_admin.GetOrganizationRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.ListAnalyticsAccountLinksRequest, + dict, + ], +) +def test_list_analytics_account_links(request_type, transport: str = "grpc"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_analytics_account_links), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse( + next_page_token="next_page_token_value", + ) + response = client.list_analytics_account_links(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = marketingplatform_admin.ListAnalyticsAccountLinksRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListAnalyticsAccountLinksPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_analytics_account_links_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_analytics_account_links), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_analytics_account_links() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.ListAnalyticsAccountLinksRequest() + + +def test_list_analytics_account_links_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = marketingplatform_admin.ListAnalyticsAccountLinksRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_analytics_account_links), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_analytics_account_links(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.ListAnalyticsAccountLinksRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_analytics_account_links_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_analytics_account_links + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_analytics_account_links + ] = mock_rpc + request = {} + client.list_analytics_account_links(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_analytics_account_links(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_analytics_account_links_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_analytics_account_links), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_analytics_account_links() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.ListAnalyticsAccountLinksRequest() + + +@pytest.mark.asyncio +async def test_list_analytics_account_links_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_analytics_account_links + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_analytics_account_links + ] = mock_rpc + + request = {} + await client.list_analytics_account_links(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.list_analytics_account_links(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_analytics_account_links_async( + transport: str = "grpc_asyncio", + request_type=marketingplatform_admin.ListAnalyticsAccountLinksRequest, +): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_analytics_account_links), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_analytics_account_links(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = marketingplatform_admin.ListAnalyticsAccountLinksRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListAnalyticsAccountLinksAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_analytics_account_links_async_from_dict(): + await test_list_analytics_account_links_async(request_type=dict) + + +def test_list_analytics_account_links_field_headers(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = marketingplatform_admin.ListAnalyticsAccountLinksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_analytics_account_links), "__call__" + ) as call: + call.return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse() + client.list_analytics_account_links(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_analytics_account_links_field_headers_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = marketingplatform_admin.ListAnalyticsAccountLinksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_analytics_account_links), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + marketingplatform_admin.ListAnalyticsAccountLinksResponse() + ) + await client.list_analytics_account_links(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_analytics_account_links_flattened(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_analytics_account_links), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_analytics_account_links( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_analytics_account_links_flattened_error(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_analytics_account_links( + marketingplatform_admin.ListAnalyticsAccountLinksRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_analytics_account_links_flattened_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_analytics_account_links), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + marketingplatform_admin.ListAnalyticsAccountLinksResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_analytics_account_links( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_analytics_account_links_flattened_error_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_analytics_account_links( + marketingplatform_admin.ListAnalyticsAccountLinksRequest(), + parent="parent_value", + ) + + +def test_list_analytics_account_links_pager(transport_name: str = "grpc"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_analytics_account_links), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + next_page_token="abc", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[], + next_page_token="def", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + ], + next_page_token="ghi", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_analytics_account_links( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.AnalyticsAccountLink) for i in results) + + +def test_list_analytics_account_links_pages(transport_name: str = "grpc"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_analytics_account_links), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + next_page_token="abc", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[], + next_page_token="def", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + ], + next_page_token="ghi", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + ), + RuntimeError, + ) + pages = list(client.list_analytics_account_links(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_analytics_account_links_async_pager(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_analytics_account_links), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + next_page_token="abc", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[], + next_page_token="def", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + ], + next_page_token="ghi", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_analytics_account_links( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.AnalyticsAccountLink) for i in responses) + + +@pytest.mark.asyncio +async def test_list_analytics_account_links_async_pages(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_analytics_account_links), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + next_page_token="abc", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[], + next_page_token="def", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + ], + next_page_token="ghi", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_analytics_account_links(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.CreateAnalyticsAccountLinkRequest, + dict, + ], +) +def test_create_analytics_account_link(request_type, transport: str = "grpc"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_analytics_account_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = resources.AnalyticsAccountLink( + name="name_value", + analytics_account="analytics_account_value", + display_name="display_name_value", + link_verification_state=resources.LinkVerificationState.LINK_VERIFICATION_STATE_VERIFIED, + ) + response = client.create_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = marketingplatform_admin.CreateAnalyticsAccountLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.AnalyticsAccountLink) + assert response.name == "name_value" + assert response.analytics_account == "analytics_account_value" + assert response.display_name == "display_name_value" + assert ( + response.link_verification_state + == resources.LinkVerificationState.LINK_VERIFICATION_STATE_VERIFIED + ) + + +def test_create_analytics_account_link_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_analytics_account_link), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_analytics_account_link() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.CreateAnalyticsAccountLinkRequest() + + +def test_create_analytics_account_link_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = marketingplatform_admin.CreateAnalyticsAccountLinkRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_analytics_account_link), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_analytics_account_link(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.CreateAnalyticsAccountLinkRequest( + parent="parent_value", + ) + + +def test_create_analytics_account_link_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_analytics_account_link + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_analytics_account_link + ] = mock_rpc + request = {} + client.create_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_analytics_account_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_analytics_account_link_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_analytics_account_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.AnalyticsAccountLink( + name="name_value", + analytics_account="analytics_account_value", + display_name="display_name_value", + link_verification_state=resources.LinkVerificationState.LINK_VERIFICATION_STATE_VERIFIED, + ) + ) + response = await client.create_analytics_account_link() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.CreateAnalyticsAccountLinkRequest() + + +@pytest.mark.asyncio +async def test_create_analytics_account_link_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + 
client._client._transport.create_analytics_account_link + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_analytics_account_link + ] = mock_rpc + + request = {} + await client.create_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.create_analytics_account_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_analytics_account_link_async( + transport: str = "grpc_asyncio", + request_type=marketingplatform_admin.CreateAnalyticsAccountLinkRequest, +): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_analytics_account_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.AnalyticsAccountLink( + name="name_value", + analytics_account="analytics_account_value", + display_name="display_name_value", + link_verification_state=resources.LinkVerificationState.LINK_VERIFICATION_STATE_VERIFIED, + ) + ) + response = await client.create_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = marketingplatform_admin.CreateAnalyticsAccountLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.AnalyticsAccountLink) + assert response.name == "name_value" + assert response.analytics_account == "analytics_account_value" + assert response.display_name == "display_name_value" + assert ( + response.link_verification_state + == resources.LinkVerificationState.LINK_VERIFICATION_STATE_VERIFIED + ) + + +@pytest.mark.asyncio +async def test_create_analytics_account_link_async_from_dict(): + await test_create_analytics_account_link_async(request_type=dict) + + +def test_create_analytics_account_link_field_headers(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = marketingplatform_admin.CreateAnalyticsAccountLinkRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_analytics_account_link), "__call__" + ) as call: + call.return_value = resources.AnalyticsAccountLink() + client.create_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_analytics_account_link_field_headers_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = marketingplatform_admin.CreateAnalyticsAccountLinkRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_analytics_account_link), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.AnalyticsAccountLink() + ) + await client.create_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_analytics_account_link_flattened(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_analytics_account_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.AnalyticsAccountLink() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.create_analytics_account_link( + parent="parent_value", + analytics_account_link=resources.AnalyticsAccountLink(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].analytics_account_link + mock_val = resources.AnalyticsAccountLink(name="name_value") + assert arg == mock_val + + +def test_create_analytics_account_link_flattened_error(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_analytics_account_link( + marketingplatform_admin.CreateAnalyticsAccountLinkRequest(), + parent="parent_value", + analytics_account_link=resources.AnalyticsAccountLink(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_analytics_account_link_flattened_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_analytics_account_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.AnalyticsAccountLink() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.AnalyticsAccountLink() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_analytics_account_link( + parent="parent_value", + analytics_account_link=resources.AnalyticsAccountLink(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].analytics_account_link + mock_val = resources.AnalyticsAccountLink(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_analytics_account_link_flattened_error_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_analytics_account_link( + marketingplatform_admin.CreateAnalyticsAccountLinkRequest(), + parent="parent_value", + analytics_account_link=resources.AnalyticsAccountLink(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.DeleteAnalyticsAccountLinkRequest, + dict, + ], +) +def test_delete_analytics_account_link(request_type, transport: str = "grpc"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_analytics_account_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_analytics_account_link_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_analytics_account_link), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_analytics_account_link() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.DeleteAnalyticsAccountLinkRequest() + + +def test_delete_analytics_account_link_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_analytics_account_link), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_analytics_account_link(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.DeleteAnalyticsAccountLinkRequest( + name="name_value", + ) + + +def test_delete_analytics_account_link_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_analytics_account_link + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_analytics_account_link + ] = mock_rpc + request = {} + client.delete_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_analytics_account_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_analytics_account_link_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_analytics_account_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_analytics_account_link() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.DeleteAnalyticsAccountLinkRequest() + + +@pytest.mark.asyncio +async def test_delete_analytics_account_link_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_analytics_account_link + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_analytics_account_link + ] = mock_rpc + + request = {} + await client.delete_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.delete_analytics_account_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_analytics_account_link_async( + transport: str = "grpc_asyncio", + request_type=marketingplatform_admin.DeleteAnalyticsAccountLinkRequest, +): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_analytics_account_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_analytics_account_link_async_from_dict(): + await test_delete_analytics_account_link_async(request_type=dict) + + +def test_delete_analytics_account_link_field_headers(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_analytics_account_link), "__call__" + ) as call: + call.return_value = None + client.delete_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_analytics_account_link_field_headers_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_analytics_account_link), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_analytics_account_link_flattened(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_analytics_account_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_analytics_account_link( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_analytics_account_link_flattened_error(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_analytics_account_link( + marketingplatform_admin.DeleteAnalyticsAccountLinkRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_analytics_account_link_flattened_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_analytics_account_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_analytics_account_link( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_analytics_account_link_flattened_error_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_analytics_account_link( + marketingplatform_admin.DeleteAnalyticsAccountLinkRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.SetPropertyServiceLevelRequest, + dict, + ], +) +def test_set_property_service_level(request_type, transport: str = "grpc"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_property_service_level), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = marketingplatform_admin.SetPropertyServiceLevelResponse() + response = client.set_property_service_level(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = marketingplatform_admin.SetPropertyServiceLevelRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, marketingplatform_admin.SetPropertyServiceLevelResponse) + + +def test_set_property_service_level_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_property_service_level), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.set_property_service_level() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.SetPropertyServiceLevelRequest() + + +def test_set_property_service_level_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = marketingplatform_admin.SetPropertyServiceLevelRequest( + analytics_account_link="analytics_account_link_value", + analytics_property="analytics_property_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_property_service_level), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.set_property_service_level(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.SetPropertyServiceLevelRequest( + analytics_account_link="analytics_account_link_value", + analytics_property="analytics_property_value", + ) + + +def test_set_property_service_level_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_property_service_level + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_property_service_level + ] = mock_rpc + request = {} + client.set_property_service_level(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.set_property_service_level(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_set_property_service_level_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_property_service_level), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + marketingplatform_admin.SetPropertyServiceLevelResponse() + ) + response = await client.set_property_service_level() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.SetPropertyServiceLevelRequest() + + +@pytest.mark.asyncio +async def test_set_property_service_level_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.set_property_service_level + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.set_property_service_level + ] = mock_rpc + + request = {} + await client.set_property_service_level(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.set_property_service_level(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_set_property_service_level_async( + transport: str = "grpc_asyncio", + request_type=marketingplatform_admin.SetPropertyServiceLevelRequest, +): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_property_service_level), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + marketingplatform_admin.SetPropertyServiceLevelResponse() + ) + response = await client.set_property_service_level(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = marketingplatform_admin.SetPropertyServiceLevelRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, marketingplatform_admin.SetPropertyServiceLevelResponse) + + +@pytest.mark.asyncio +async def test_set_property_service_level_async_from_dict(): + await test_set_property_service_level_async(request_type=dict) + + +def test_set_property_service_level_field_headers(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = marketingplatform_admin.SetPropertyServiceLevelRequest() + + request.analytics_account_link = "analytics_account_link_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_property_service_level), "__call__" + ) as call: + call.return_value = marketingplatform_admin.SetPropertyServiceLevelResponse() + client.set_property_service_level(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "analytics_account_link=analytics_account_link_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_property_service_level_field_headers_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = marketingplatform_admin.SetPropertyServiceLevelRequest() + + request.analytics_account_link = "analytics_account_link_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_property_service_level), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + marketingplatform_admin.SetPropertyServiceLevelResponse() + ) + await client.set_property_service_level(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "analytics_account_link=analytics_account_link_value", + ) in kw["metadata"] + + +def test_set_property_service_level_flattened(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_property_service_level), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = marketingplatform_admin.SetPropertyServiceLevelResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.set_property_service_level( + analytics_account_link="analytics_account_link_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].analytics_account_link + mock_val = "analytics_account_link_value" + assert arg == mock_val + + +def test_set_property_service_level_flattened_error(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_property_service_level( + marketingplatform_admin.SetPropertyServiceLevelRequest(), + analytics_account_link="analytics_account_link_value", + ) + + +@pytest.mark.asyncio +async def test_set_property_service_level_flattened_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_property_service_level), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = marketingplatform_admin.SetPropertyServiceLevelResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + marketingplatform_admin.SetPropertyServiceLevelResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.set_property_service_level( + analytics_account_link="analytics_account_link_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].analytics_account_link + mock_val = "analytics_account_link_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_set_property_service_level_flattened_error_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.set_property_service_level( + marketingplatform_admin.SetPropertyServiceLevelRequest(), + analytics_account_link="analytics_account_link_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.GetOrganizationRequest, + dict, + ], +) +def test_get_organization_rest(request_type): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = resources.Organization( + name="name_value", + display_name="display_name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Organization.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_organization(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Organization) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_get_organization_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_organization in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_organization + ] = mock_rpc + + request = {} + client.get_organization(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_organization(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_organization_rest_required_fields( + request_type=marketingplatform_admin.GetOrganizationRequest, +): + transport_class = transports.MarketingplatformAdminServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_organization._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_organization._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.Organization() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.Organization.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_organization(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_organization_rest_unset_required_fields(): + transport = transports.MarketingplatformAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_organization._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_organization_rest_interceptors(null_interceptor): + transport = transports.MarketingplatformAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MarketingplatformAdminServiceRestInterceptor(), + ) + client = MarketingplatformAdminServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, "post_get_organization" + ) as post, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, 
"pre_get_organization" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = marketingplatform_admin.GetOrganizationRequest.pb( + marketingplatform_admin.GetOrganizationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.Organization.to_json( + resources.Organization() + ) + + request = marketingplatform_admin.GetOrganizationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.Organization() + + client.get_organization( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_organization_rest_bad_request( + transport: str = "rest", request_type=marketingplatform_admin.GetOrganizationRequest +): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_organization(request) + + +def test_get_organization_rest_flattened(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Organization() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "organizations/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Organization.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_organization(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=organizations/*}" % client.transport._host, args[1] + ) + + +def test_get_organization_rest_flattened_error(transport: str = "rest"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_organization( + marketingplatform_admin.GetOrganizationRequest(), + name="name_value", + ) + + +def test_get_organization_rest_error(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.ListAnalyticsAccountLinksRequest, + dict, + ], +) +def test_list_analytics_account_links_rest(request_type): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_analytics_account_links(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListAnalyticsAccountLinksPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_analytics_account_links_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_analytics_account_links + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_analytics_account_links + ] = mock_rpc + + request = {} + client.list_analytics_account_links(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_analytics_account_links(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_analytics_account_links_rest_required_fields( + request_type=marketingplatform_admin.ListAnalyticsAccountLinksRequest, +): + transport_class = transports.MarketingplatformAdminServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_analytics_account_links._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_analytics_account_links._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_analytics_account_links(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_analytics_account_links_rest_unset_required_fields(): + transport = transports.MarketingplatformAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_analytics_account_links._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_analytics_account_links_rest_interceptors(null_interceptor): + transport = transports.MarketingplatformAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MarketingplatformAdminServiceRestInterceptor(), + ) + client = 
MarketingplatformAdminServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, + "post_list_analytics_account_links", + ) as post, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, + "pre_list_analytics_account_links", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = marketingplatform_admin.ListAnalyticsAccountLinksRequest.pb( + marketingplatform_admin.ListAnalyticsAccountLinksRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + marketingplatform_admin.ListAnalyticsAccountLinksResponse.to_json( + marketingplatform_admin.ListAnalyticsAccountLinksResponse() + ) + ) + + request = marketingplatform_admin.ListAnalyticsAccountLinksRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse() + + client.list_analytics_account_links( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_analytics_account_links_rest_bad_request( + transport: str = "rest", + request_type=marketingplatform_admin.ListAnalyticsAccountLinksRequest, +): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and 
fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_analytics_account_links(request) + + +def test_list_analytics_account_links_rest_flattened(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "organizations/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_analytics_account_links(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=organizations/*}/analyticsAccountLinks" + % client.transport._host, + args[1], + ) + + +def test_list_analytics_account_links_rest_flattened_error(transport: str = "rest"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_analytics_account_links( + marketingplatform_admin.ListAnalyticsAccountLinksRequest(), + parent="parent_value", + ) + + +def test_list_analytics_account_links_rest_pager(transport: str = "rest"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + next_page_token="abc", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[], + next_page_token="def", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + ], + next_page_token="ghi", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + marketingplatform_admin.ListAnalyticsAccountLinksResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "organizations/sample1"} + + pager = client.list_analytics_account_links(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.AnalyticsAccountLink) for i in results) + + pages = list(client.list_analytics_account_links(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.CreateAnalyticsAccountLinkRequest, + dict, + ], +) +def test_create_analytics_account_link_rest(request_type): + client = MarketingplatformAdminServiceClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1"} + request_init["analytics_account_link"] = { + "name": "name_value", + "analytics_account": "analytics_account_value", + "display_name": "display_name_value", + "link_verification_state": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = marketingplatform_admin.CreateAnalyticsAccountLinkRequest.meta.fields[ + "analytics_account_link" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "analytics_account_link" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["analytics_account_link"][field])): + del 
request_init["analytics_account_link"][field][i][subfield] + else: + del request_init["analytics_account_link"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.AnalyticsAccountLink( + name="name_value", + analytics_account="analytics_account_value", + display_name="display_name_value", + link_verification_state=resources.LinkVerificationState.LINK_VERIFICATION_STATE_VERIFIED, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.AnalyticsAccountLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_analytics_account_link(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.AnalyticsAccountLink) + assert response.name == "name_value" + assert response.analytics_account == "analytics_account_value" + assert response.display_name == "display_name_value" + assert ( + response.link_verification_state + == resources.LinkVerificationState.LINK_VERIFICATION_STATE_VERIFIED + ) + + +def test_create_analytics_account_link_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_analytics_account_link + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_analytics_account_link + ] = mock_rpc + + request = {} + client.create_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_analytics_account_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_analytics_account_link_rest_required_fields( + request_type=marketingplatform_admin.CreateAnalyticsAccountLinkRequest, +): + transport_class = transports.MarketingplatformAdminServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_analytics_account_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_analytics_account_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.AnalyticsAccountLink() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.AnalyticsAccountLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_analytics_account_link(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_analytics_account_link_rest_unset_required_fields(): + transport = transports.MarketingplatformAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_analytics_account_link._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "analyticsAccountLink", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_analytics_account_link_rest_interceptors(null_interceptor): + transport = transports.MarketingplatformAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MarketingplatformAdminServiceRestInterceptor(), + ) + client = 
MarketingplatformAdminServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, + "post_create_analytics_account_link", + ) as post, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, + "pre_create_analytics_account_link", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = marketingplatform_admin.CreateAnalyticsAccountLinkRequest.pb( + marketingplatform_admin.CreateAnalyticsAccountLinkRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = resources.AnalyticsAccountLink.to_json( + resources.AnalyticsAccountLink() + ) + + request = marketingplatform_admin.CreateAnalyticsAccountLinkRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.AnalyticsAccountLink() + + client.create_analytics_account_link( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_analytics_account_link_rest_bad_request( + transport: str = "rest", + request_type=marketingplatform_admin.CreateAnalyticsAccountLinkRequest, +): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_analytics_account_link(request) + + +def test_create_analytics_account_link_rest_flattened(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.AnalyticsAccountLink() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "organizations/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + analytics_account_link=resources.AnalyticsAccountLink(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.AnalyticsAccountLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_analytics_account_link(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=organizations/*}/analyticsAccountLinks" + % client.transport._host, + args[1], + ) + + +def test_create_analytics_account_link_rest_flattened_error(transport: str = "rest"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_analytics_account_link( + marketingplatform_admin.CreateAnalyticsAccountLinkRequest(), + parent="parent_value", + analytics_account_link=resources.AnalyticsAccountLink(name="name_value"), + ) + + +def test_create_analytics_account_link_rest_error(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.DeleteAnalyticsAccountLinkRequest, + dict, + ], +) +def test_delete_analytics_account_link_rest(request_type): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/analyticsAccountLinks/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_analytics_account_link(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_analytics_account_link_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_analytics_account_link + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_analytics_account_link + ] = mock_rpc + + request = {} + client.delete_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_analytics_account_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_analytics_account_link_rest_required_fields( + request_type=marketingplatform_admin.DeleteAnalyticsAccountLinkRequest, +): + transport_class = transports.MarketingplatformAdminServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_analytics_account_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_analytics_account_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_analytics_account_link(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_analytics_account_link_rest_unset_required_fields(): + transport = transports.MarketingplatformAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_analytics_account_link._get_unset_required_fields( + {} + ) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_analytics_account_link_rest_interceptors(null_interceptor): + transport = transports.MarketingplatformAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MarketingplatformAdminServiceRestInterceptor(), + ) + client = MarketingplatformAdminServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, + "pre_delete_analytics_account_link", + ) as pre: + pre.assert_not_called() + pb_message = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest.pb( + 
marketingplatform_admin.DeleteAnalyticsAccountLinkRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_analytics_account_link( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_analytics_account_link_rest_bad_request( + transport: str = "rest", + request_type=marketingplatform_admin.DeleteAnalyticsAccountLinkRequest, +): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/analyticsAccountLinks/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_analytics_account_link(request) + + +def test_delete_analytics_account_link_rest_flattened(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "organizations/sample1/analyticsAccountLinks/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_analytics_account_link(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=organizations/*/analyticsAccountLinks/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_analytics_account_link_rest_flattened_error(transport: str = "rest"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_analytics_account_link( + marketingplatform_admin.DeleteAnalyticsAccountLinkRequest(), + name="name_value", + ) + + +def test_delete_analytics_account_link_rest_error(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.SetPropertyServiceLevelRequest, + dict, + ], +) +def test_set_property_service_level_rest(request_type): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "analytics_account_link": "organizations/sample1/analyticsAccountLinks/sample2" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = marketingplatform_admin.SetPropertyServiceLevelResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = marketingplatform_admin.SetPropertyServiceLevelResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.set_property_service_level(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, marketingplatform_admin.SetPropertyServiceLevelResponse) + + +def test_set_property_service_level_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_property_service_level + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_property_service_level + ] = mock_rpc + + request = {} + client.set_property_service_level(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.set_property_service_level(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_set_property_service_level_rest_required_fields( + request_type=marketingplatform_admin.SetPropertyServiceLevelRequest, +): + transport_class = transports.MarketingplatformAdminServiceRestTransport + + request_init = {} + request_init["analytics_account_link"] = "" + request_init["analytics_property"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_property_service_level._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["analyticsAccountLink"] = "analytics_account_link_value" + jsonified_request["analyticsProperty"] = "analytics_property_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_property_service_level._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "analyticsAccountLink" in jsonified_request + assert jsonified_request["analyticsAccountLink"] == "analytics_account_link_value" + assert "analyticsProperty" in jsonified_request + assert jsonified_request["analyticsProperty"] == "analytics_property_value" + + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = marketingplatform_admin.SetPropertyServiceLevelResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = marketingplatform_admin.SetPropertyServiceLevelResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_property_service_level(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_set_property_service_level_rest_unset_required_fields(): + transport = transports.MarketingplatformAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.set_property_service_level._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "analyticsAccountLink", + "analyticsProperty", + "serviceLevel", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_property_service_level_rest_interceptors(null_interceptor): + transport = transports.MarketingplatformAdminServiceRestTransport( + 
credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MarketingplatformAdminServiceRestInterceptor(), + ) + client = MarketingplatformAdminServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, + "post_set_property_service_level", + ) as post, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, + "pre_set_property_service_level", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = marketingplatform_admin.SetPropertyServiceLevelRequest.pb( + marketingplatform_admin.SetPropertyServiceLevelRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + marketingplatform_admin.SetPropertyServiceLevelResponse.to_json( + marketingplatform_admin.SetPropertyServiceLevelResponse() + ) + ) + + request = marketingplatform_admin.SetPropertyServiceLevelRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = marketingplatform_admin.SetPropertyServiceLevelResponse() + + client.set_property_service_level( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_property_service_level_rest_bad_request( + transport: str = "rest", + request_type=marketingplatform_admin.SetPropertyServiceLevelRequest, +): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding 
+ request_init = { + "analytics_account_link": "organizations/sample1/analyticsAccountLinks/sample2" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_property_service_level(request) + + +def test_set_property_service_level_rest_flattened(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = marketingplatform_admin.SetPropertyServiceLevelResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "analytics_account_link": "organizations/sample1/analyticsAccountLinks/sample2" + } + + # get truthy value for each flattened field + mock_args = dict( + analytics_account_link="analytics_account_link_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = marketingplatform_admin.SetPropertyServiceLevelResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.set_property_service_level(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{analytics_account_link=organizations/*/analyticsAccountLinks/*}:setPropertyServiceLevel" + % client.transport._host, + args[1], + ) + + +def test_set_property_service_level_rest_flattened_error(transport: str = "rest"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_property_service_level( + marketingplatform_admin.SetPropertyServiceLevelRequest(), + analytics_account_link="analytics_account_link_value", + ) + + +def test_set_property_service_level_rest_error(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.MarketingplatformAdminServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.MarketingplatformAdminServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MarketingplatformAdminServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.MarketingplatformAdminServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MarketingplatformAdminServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MarketingplatformAdminServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.MarketingplatformAdminServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MarketingplatformAdminServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.MarketingplatformAdminServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = MarketingplatformAdminServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.MarketingplatformAdminServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.MarketingplatformAdminServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MarketingplatformAdminServiceGrpcTransport, + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + transports.MarketingplatformAdminServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = MarketingplatformAdminServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.MarketingplatformAdminServiceGrpcTransport, + ) + + +def test_marketingplatform_admin_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.MarketingplatformAdminServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_marketingplatform_admin_service_base_transport(): + # Instantiate the base transport. 
+ with mock.patch( + "google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.transports.MarketingplatformAdminServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.MarketingplatformAdminServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "get_organization", + "list_analytics_account_links", + "create_analytics_account_link", + "delete_analytics_account_link", + "set_property_service_level", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_marketingplatform_admin_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.transports.MarketingplatformAdminServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MarketingplatformAdminServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.read", + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.update", + ), + quota_project_id="octopus", + ) + + +def 
test_marketingplatform_admin_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.transports.MarketingplatformAdminServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MarketingplatformAdminServiceTransport() + adc.assert_called_once() + + +def test_marketingplatform_admin_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + MarketingplatformAdminServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.read", + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.update", + ), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MarketingplatformAdminServiceGrpcTransport, + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + ], +) +def test_marketingplatform_admin_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.read", + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.update", + ), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MarketingplatformAdminServiceGrpcTransport, + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + transports.MarketingplatformAdminServiceRestTransport, + ], +) +def test_marketingplatform_admin_service_transport_auth_gdch_credentials( + transport_class, +): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.MarketingplatformAdminServiceGrpcTransport, grpc_helpers), + ( + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + grpc_helpers_async, + ), + ], +) +def test_marketingplatform_admin_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "marketingplatformadmin.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.read", + "https://www.googleapis.com/auth/marketingplatformadmin.analytics.update", + ), + scopes=["1", "2"], + default_host="marketingplatformadmin.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MarketingplatformAdminServiceGrpcTransport, + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + ], +) +def test_marketingplatform_admin_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_marketingplatform_admin_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.MarketingplatformAdminServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_marketingplatform_admin_service_host_no_port(transport_name): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="marketingplatformadmin.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "marketingplatformadmin.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://marketingplatformadmin.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_marketingplatform_admin_service_host_with_port(transport_name): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="marketingplatformadmin.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + 
"marketingplatformadmin.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://marketingplatformadmin.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_marketingplatform_admin_service_client_transport_session_collision( + transport_name, +): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = MarketingplatformAdminServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = MarketingplatformAdminServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_organization._session + session2 = client2.transport.get_organization._session + assert session1 != session2 + session1 = client1.transport.list_analytics_account_links._session + session2 = client2.transport.list_analytics_account_links._session + assert session1 != session2 + session1 = client1.transport.create_analytics_account_link._session + session2 = client2.transport.create_analytics_account_link._session + assert session1 != session2 + session1 = client1.transport.delete_analytics_account_link._session + session2 = client2.transport.delete_analytics_account_link._session + assert session1 != session2 + session1 = client1.transport.set_property_service_level._session + session2 = client2.transport.set_property_service_level._session + assert session1 != session2 + + +def test_marketingplatform_admin_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.MarketingplatformAdminServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_marketingplatform_admin_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.MarketingplatformAdminServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.MarketingplatformAdminServiceGrpcTransport, + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + ], +) +def test_marketingplatform_admin_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert 
bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.MarketingplatformAdminServiceGrpcTransport, + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + ], +) +def test_marketingplatform_admin_service_transport_channel_mtls_with_adc( + transport_class, +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_account_path(): + account = "squid" + expected = "accounts/{account}".format( + account=account, + ) + actual = 
MarketingplatformAdminServiceClient.account_path(account) + assert expected == actual + + +def test_parse_account_path(): + expected = { + "account": "clam", + } + path = MarketingplatformAdminServiceClient.account_path(**expected) + + # Check that the path construction is reversible. + actual = MarketingplatformAdminServiceClient.parse_account_path(path) + assert expected == actual + + +def test_analytics_account_link_path(): + organization = "whelk" + analytics_account_link = "octopus" + expected = "organizations/{organization}/analyticsAccountLinks/{analytics_account_link}".format( + organization=organization, + analytics_account_link=analytics_account_link, + ) + actual = MarketingplatformAdminServiceClient.analytics_account_link_path( + organization, analytics_account_link + ) + assert expected == actual + + +def test_parse_analytics_account_link_path(): + expected = { + "organization": "oyster", + "analytics_account_link": "nudibranch", + } + path = MarketingplatformAdminServiceClient.analytics_account_link_path(**expected) + + # Check that the path construction is reversible. + actual = MarketingplatformAdminServiceClient.parse_analytics_account_link_path(path) + assert expected == actual + + +def test_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = MarketingplatformAdminServiceClient.organization_path(organization) + assert expected == actual + + +def test_parse_organization_path(): + expected = { + "organization": "mussel", + } + path = MarketingplatformAdminServiceClient.organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = MarketingplatformAdminServiceClient.parse_organization_path(path) + assert expected == actual + + +def test_property_path(): + property = "winkle" + expected = "properties/{property}".format( + property=property, + ) + actual = MarketingplatformAdminServiceClient.property_path(property) + assert expected == actual + + +def test_parse_property_path(): + expected = { + "property": "nautilus", + } + path = MarketingplatformAdminServiceClient.property_path(**expected) + + # Check that the path construction is reversible. + actual = MarketingplatformAdminServiceClient.parse_property_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "scallop" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = MarketingplatformAdminServiceClient.common_billing_account_path( + billing_account + ) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "abalone", + } + path = MarketingplatformAdminServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = MarketingplatformAdminServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "squid" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = MarketingplatformAdminServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "clam", + } + path = MarketingplatformAdminServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = MarketingplatformAdminServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "whelk" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = MarketingplatformAdminServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "octopus", + } + path = MarketingplatformAdminServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = MarketingplatformAdminServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "oyster" + expected = "projects/{project}".format( + project=project, + ) + actual = MarketingplatformAdminServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nudibranch", + } + path = MarketingplatformAdminServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = MarketingplatformAdminServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "cuttlefish" + location = "mussel" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = MarketingplatformAdminServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "winkle", + "location": "nautilus", + } + path = MarketingplatformAdminServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = MarketingplatformAdminServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.MarketingplatformAdminServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.MarketingplatformAdminServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = MarketingplatformAdminServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + MarketingplatformAdminServiceClient, + transports.MarketingplatformAdminServiceGrpcTransport, + ), + ( + MarketingplatformAdminServiceAsyncClient, + transports.MarketingplatformAdminServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-ai-generativelanguage/CHANGELOG.md b/packages/google-ai-generativelanguage/CHANGELOG.md index d3b8538f00c3..c6fa336083ab 100644 --- a/packages/google-ai-generativelanguage/CHANGELOG.md +++ b/packages/google-ai-generativelanguage/CHANGELOG.md @@ -1,5 +1,24 @@ # Changelog +## [0.6.10](https://github.com/googleapis/google-cloud-python/compare/google-ai-generativelanguage-v0.6.9...google-ai-generativelanguage-v0.6.10) (2024-09-23) + + +### Features + +* Add GenerationConfig.{presence_penalty, frequency_penalty, logprobs, response_logprobs, logprobs} and Candidate.{avg_logprobs, logprobs_result} 
([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) +* Add GoogleSearchRetrieval tool and candidate.grounding_metadata ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) +* Add HarmBlockThreshold.OFF ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) +* Add HarmCategory.HARM_CATEGORY_CIVIC_INTEGRITY ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) +* Add PredictionService (for Imagen) ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) +* Add Schema.min_items ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) +* Add TunedModels.reader_project_numbers ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) + + +### Documentation + +* Small fixes ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) +* Tag HarmCategories by the model family they're used on. ([d6238e4](https://github.com/googleapis/google-cloud-python/commit/d6238e49a17caf54dd0fbc45215527beed057cc5)) + ## [0.6.9](https://github.com/googleapis/google-cloud-python/compare/google-ai-generativelanguage-v0.6.8...google-ai-generativelanguage-v0.6.9) (2024-08-19) diff --git a/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/prediction_service.rst b/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/prediction_service.rst new file mode 100644 index 000000000000..7b2b932acacc --- /dev/null +++ b/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/prediction_service.rst @@ -0,0 +1,6 @@ +PredictionService +----------------------------------- + +.. 
automodule:: google.ai.generativelanguage_v1beta.services.prediction_service + :members: + :inherited-members: diff --git a/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/services_.rst b/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/services_.rst index 24e6184e8b0e..7a7b5429bd6f 100644 --- a/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/services_.rst +++ b/packages/google-ai-generativelanguage/docs/generativelanguage_v1beta/services_.rst @@ -9,5 +9,6 @@ Services for Google Ai Generativelanguage v1beta API generative_service model_service permission_service + prediction_service retriever_service text_service diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage/__init__.py index c69803d506d6..750b54051c3f 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage/__init__.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage/__init__.py @@ -54,6 +54,12 @@ from google.ai.generativelanguage_v1beta.services.permission_service.client import ( PermissionServiceClient, ) +from google.ai.generativelanguage_v1beta.services.prediction_service.async_client import ( + PredictionServiceAsyncClient, +) +from google.ai.generativelanguage_v1beta.services.prediction_service.client import ( + PredictionServiceClient, +) from google.ai.generativelanguage_v1beta.services.retriever_service.async_client import ( RetrieverServiceAsyncClient, ) @@ -84,12 +90,14 @@ CodeExecution, CodeExecutionResult, Content, + DynamicRetrievalConfig, ExecutableCode, FileData, FunctionCall, FunctionCallingConfig, FunctionDeclaration, FunctionResponse, + GoogleSearchRetrieval, GroundingPassage, GroundingPassages, Part, @@ -132,6 +140,13 @@ GenerateContentResponse, GenerationConfig, GroundingAttribution, + GroundingChunk, + GroundingMetadata, + GroundingSupport, + LogprobsResult, + 
RetrievalMetadata, + SearchEntryPoint, + Segment, SemanticRetrieverConfig, TaskType, ) @@ -159,6 +174,10 @@ TransferOwnershipResponse, UpdatePermissionRequest, ) +from google.ai.generativelanguage_v1beta.types.prediction_service import ( + PredictRequest, + PredictResponse, +) from google.ai.generativelanguage_v1beta.types.retriever import ( Chunk, ChunkData, @@ -243,6 +262,8 @@ "ModelServiceAsyncClient", "PermissionServiceClient", "PermissionServiceAsyncClient", + "PredictionServiceClient", + "PredictionServiceAsyncClient", "RetrieverServiceClient", "RetrieverServiceAsyncClient", "TextServiceClient", @@ -260,12 +281,14 @@ "CodeExecution", "CodeExecutionResult", "Content", + "DynamicRetrievalConfig", "ExecutableCode", "FileData", "FunctionCall", "FunctionCallingConfig", "FunctionDeclaration", "FunctionResponse", + "GoogleSearchRetrieval", "GroundingPassage", "GroundingPassages", "Part", @@ -303,6 +326,13 @@ "GenerateContentResponse", "GenerationConfig", "GroundingAttribution", + "GroundingChunk", + "GroundingMetadata", + "GroundingSupport", + "LogprobsResult", + "RetrievalMetadata", + "SearchEntryPoint", + "Segment", "SemanticRetrieverConfig", "TaskType", "Model", @@ -325,6 +355,8 @@ "TransferOwnershipRequest", "TransferOwnershipResponse", "UpdatePermissionRequest", + "PredictRequest", + "PredictResponse", "Chunk", "ChunkData", "Condition", diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py index 1699c98da708..8ebdaa033b52 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.6.9" # {x-release-please-version} +__version__ = "0.6.10" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/__init__.py index 4c8665b1b49a..a383f98f4342 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/__init__.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/__init__.py @@ -37,6 +37,7 @@ GenerateContentRequest, GenerateContentResponse, GenerationConfig, + LogprobsResult, TaskType, ) from .types.model import Model @@ -66,6 +67,7 @@ "HarmCategory", "ListModelsRequest", "ListModelsResponse", + "LogprobsResult", "Model", "ModelServiceClient", "Part", diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py index 1699c98da708..8ebdaa033b52 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.6.9" # {x-release-please-version} +__version__ = "0.6.10" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/rest.py index 7498cba8d7e3..7caa772eb19f 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/rest.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/rest.py @@ -1057,6 +1057,10 @@ def __call__( "method": "get", "uri": "/v1/{name=tunedModels/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=generatedFiles/*}/operations/*", + }, ] request, metadata = self._interceptor.pre_get_operation(request, metadata) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/rest.py index e21bd17d6e8b..a431622cc869 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/rest.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/rest.py @@ -572,6 +572,10 @@ def __call__( "method": "get", "uri": "/v1/{name=tunedModels/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=generatedFiles/*}/operations/*", + }, ] request, metadata = self._interceptor.pre_get_operation(request, metadata) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/__init__.py index 522ecb07c1c3..9156b856ee0e 100644 --- 
a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/__init__.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/__init__.py @@ -27,6 +27,7 @@ GenerateContentRequest, GenerateContentResponse, GenerationConfig, + LogprobsResult, TaskType, ) from .model import Model @@ -50,6 +51,7 @@ "GenerateContentRequest", "GenerateContentResponse", "GenerationConfig", + "LogprobsResult", "TaskType", "Model", "GetModelRequest", diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/generative_service.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/generative_service.py index e19c5b166abc..e8062906bfbf 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/generative_service.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/generative_service.py @@ -31,6 +31,7 @@ "GenerationConfig", "GenerateContentResponse", "Candidate", + "LogprobsResult", "EmbedContentRequest", "ContentEmbedding", "EmbedContentResponse", @@ -228,6 +229,58 @@ class GenerationConfig(proto.Message): doesn't allow setting ``top_k`` on requests. This field is a member of `oneof`_ ``_top_k``. + presence_penalty (float): + Optional. Presence penalty applied to the next token's + logprobs if the token has already been seen in the response. + + This penalty is binary on/off and not dependant on the + number of times the token is used (after the first). Use + [frequency_penalty][google.ai.generativelanguage.v1.GenerationConfig.frequency_penalty] + for a penalty that increases with each use. + + A positive penalty will discourage the use of tokens that + have already been used in the response, increasing the + vocabulary. + + A negative penalty will encourage the use of tokens that + have already been used in the response, decreasing the + vocabulary. + + This field is a member of `oneof`_ ``_presence_penalty``. 
+ frequency_penalty (float): + Optional. Frequency penalty applied to the next token's + logprobs, multiplied by the number of times each token has + been seen in the respponse so far. + + A positive penalty will discourage the use of tokens that + have already been used, proportional to the number of times + the token has been used: The more a token is used, the more + dificult it is for the model to use that token again + increasing the vocabulary of responses. + + Caution: A *negative* penalty will encourage the model to + reuse tokens proportional to the number of times the token + has been used. Small negative values will reduce the + vocabulary of a response. Larger negative values will cause + the model to start repeating a common token until it hits + the + [max_output_tokens][google.ai.generativelanguage.v1.GenerationConfig.max_output_tokens] + limit: "...the the the the the...". + + This field is a member of `oneof`_ ``_frequency_penalty``. + response_logprobs (bool): + Optional. If true, export the logprobs + results in response. + + This field is a member of `oneof`_ ``_response_logprobs``. + logprobs (int): + Optional. Only valid if + [response_logprobs=True][google.ai.generativelanguage.v1.GenerationConfig.response_logprobs]. + This sets the number of top logprobs to return at each + decoding step in the + [Candidate.logprobs_result][google.ai.generativelanguage.v1.Candidate.logprobs_result]. + + This field is a member of `oneof`_ ``_logprobs``. 
""" candidate_count: int = proto.Field( @@ -259,6 +312,26 @@ class GenerationConfig(proto.Message): number=7, optional=True, ) + presence_penalty: float = proto.Field( + proto.FLOAT, + number=15, + optional=True, + ) + frequency_penalty: float = proto.Field( + proto.FLOAT, + number=16, + optional=True, + ) + response_logprobs: bool = proto.Field( + proto.BOOL, + number=17, + optional=True, + ) + logprobs: int = proto.Field( + proto.INT32, + number=18, + optional=True, + ) class GenerateContentResponse(proto.Message): @@ -414,6 +487,11 @@ class Candidate(proto.Message): foundational LLM's training data. token_count (int): Output only. Token count for this candidate. + avg_logprobs (float): + Output only. + logprobs_result (google.ai.generativelanguage_v1.types.LogprobsResult): + Output only. Log-likelihood scores for the + response tokens and top tokens """ class FinishReason(proto.Enum): @@ -494,6 +572,89 @@ class FinishReason(proto.Enum): proto.INT32, number=7, ) + avg_logprobs: float = proto.Field( + proto.DOUBLE, + number=10, + ) + logprobs_result: "LogprobsResult" = proto.Field( + proto.MESSAGE, + number=11, + message="LogprobsResult", + ) + + +class LogprobsResult(proto.Message): + r"""Logprobs Result + + Attributes: + top_candidates (MutableSequence[google.ai.generativelanguage_v1.types.LogprobsResult.TopCandidates]): + Length = total number of decoding steps. + chosen_candidates (MutableSequence[google.ai.generativelanguage_v1.types.LogprobsResult.Candidate]): + Length = total number of decoding steps. The chosen + candidates may or may not be in top_candidates. + """ + + class Candidate(proto.Message): + r"""Candidate for the logprobs token and score. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + token (str): + The candidate’s token string value. + + This field is a member of `oneof`_ ``_token``. + token_id (int): + The candidate’s token id value. 
+ + This field is a member of `oneof`_ ``_token_id``. + log_probability (float): + The candidate's log probability. + + This field is a member of `oneof`_ ``_log_probability``. + """ + + token: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + token_id: int = proto.Field( + proto.INT32, + number=3, + optional=True, + ) + log_probability: float = proto.Field( + proto.FLOAT, + number=2, + optional=True, + ) + + class TopCandidates(proto.Message): + r"""Candidates with top log probabilities at each decoding step. + + Attributes: + candidates (MutableSequence[google.ai.generativelanguage_v1.types.LogprobsResult.Candidate]): + Sorted by log probability in descending + order. + """ + + candidates: MutableSequence["LogprobsResult.Candidate"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="LogprobsResult.Candidate", + ) + + top_candidates: MutableSequence[TopCandidates] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=TopCandidates, + ) + chosen_candidates: MutableSequence[Candidate] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=Candidate, + ) class EmbedContentRequest(proto.Message): diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/safety.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/safety.py index 100fc75977da..2a75fd715410 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/safety.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/safety.py @@ -39,31 +39,32 @@ class HarmCategory(proto.Enum): HARM_CATEGORY_UNSPECIFIED (0): Category is unspecified. HARM_CATEGORY_DEROGATORY (1): - Negative or harmful comments targeting - identity and/or protected attribute. + **PaLM** - Negative or harmful comments targeting identity + and/or protected attribute. HARM_CATEGORY_TOXICITY (2): - Content that is rude, disrespectful, or - profane. 
+ **PaLM** - Content that is rude, disrespectful, or profane. HARM_CATEGORY_VIOLENCE (3): - Describes scenarios depicting violence - against an individual or group, or general - descriptions of gore. + **PaLM** - Describes scenarios depicting violence against an + individual or group, or general descriptions of gore. HARM_CATEGORY_SEXUAL (4): - Contains references to sexual acts or other - lewd content. + **PaLM** - Contains references to sexual acts or other lewd + content. HARM_CATEGORY_MEDICAL (5): - Promotes unchecked medical advice. + **PaLM** - Promotes unchecked medical advice. HARM_CATEGORY_DANGEROUS (6): - Dangerous content that promotes, facilitates, - or encourages harmful acts. + **PaLM** - Dangerous content that promotes, facilitates, or + encourages harmful acts. HARM_CATEGORY_HARASSMENT (7): - Harasment content. + **Gemini** - Harassment content. HARM_CATEGORY_HATE_SPEECH (8): - Hate speech and content. + **Gemini** - Hate speech and content. HARM_CATEGORY_SEXUALLY_EXPLICIT (9): - Sexually explicit content. + **Gemini** - Sexually explicit content. HARM_CATEGORY_DANGEROUS_CONTENT (10): - Dangerous content. + **Gemini** - Dangerous content. + HARM_CATEGORY_CIVIC_INTEGRITY (11): + **Gemini** - Content that may be used to harm civic + integrity. """ HARM_CATEGORY_UNSPECIFIED = 0 HARM_CATEGORY_DEROGATORY = 1 @@ -76,6 +77,7 @@ class HarmCategory(proto.Enum): HARM_CATEGORY_HATE_SPEECH = 8 HARM_CATEGORY_SEXUALLY_EXPLICIT = 9 HARM_CATEGORY_DANGEROUS_CONTENT = 10 + HARM_CATEGORY_CIVIC_INTEGRITY = 11 class SafetyRating(proto.Message): @@ -170,12 +172,15 @@ class HarmBlockThreshold(proto.Enum): be allowed. BLOCK_NONE (4): All content will be allowed. + OFF (5): + Turn off the safety filter. 
""" HARM_BLOCK_THRESHOLD_UNSPECIFIED = 0 BLOCK_LOW_AND_ABOVE = 1 BLOCK_MEDIUM_AND_ABOVE = 2 BLOCK_ONLY_HIGH = 3 BLOCK_NONE = 4 + OFF = 5 category: "HarmCategory" = proto.Field( proto.ENUM, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/__init__.py index c692fa7725c9..73da8c53fefc 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/__init__.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/__init__.py @@ -30,6 +30,10 @@ PermissionServiceAsyncClient, PermissionServiceClient, ) +from .services.prediction_service import ( + PredictionServiceAsyncClient, + PredictionServiceClient, +) from .services.retriever_service import ( RetrieverServiceAsyncClient, RetrieverServiceClient, @@ -50,12 +54,14 @@ CodeExecution, CodeExecutionResult, Content, + DynamicRetrievalConfig, ExecutableCode, FileData, FunctionCall, FunctionCallingConfig, FunctionDeclaration, FunctionResponse, + GoogleSearchRetrieval, GroundingPassage, GroundingPassages, Part, @@ -98,6 +104,13 @@ GenerateContentResponse, GenerationConfig, GroundingAttribution, + GroundingChunk, + GroundingMetadata, + GroundingSupport, + LogprobsResult, + RetrievalMetadata, + SearchEntryPoint, + Segment, SemanticRetrieverConfig, TaskType, ) @@ -125,6 +138,7 @@ TransferOwnershipResponse, UpdatePermissionRequest, ) +from .types.prediction_service import PredictRequest, PredictResponse from .types.retriever import ( Chunk, ChunkData, @@ -203,6 +217,7 @@ "GenerativeServiceAsyncClient", "ModelServiceAsyncClient", "PermissionServiceAsyncClient", + "PredictionServiceAsyncClient", "RetrieverServiceAsyncClient", "TextServiceAsyncClient", "AttributionSourceId", @@ -256,6 +271,7 @@ "DeleteTunedModelRequest", "DiscussServiceClient", "Document", + "DynamicRetrievalConfig", "EmbedContentRequest", "EmbedContentResponse", "EmbedTextRequest", @@ -288,9 
+304,13 @@ "GetModelRequest", "GetPermissionRequest", "GetTunedModelRequest", + "GoogleSearchRetrieval", "GroundingAttribution", + "GroundingChunk", + "GroundingMetadata", "GroundingPassage", "GroundingPassages", + "GroundingSupport", "HarmCategory", "Hyperparameters", "ListCachedContentsRequest", @@ -309,6 +329,7 @@ "ListPermissionsResponse", "ListTunedModelsRequest", "ListTunedModelsResponse", + "LogprobsResult", "Message", "MessagePrompt", "MetadataFilter", @@ -317,16 +338,22 @@ "Part", "Permission", "PermissionServiceClient", + "PredictRequest", + "PredictResponse", + "PredictionServiceClient", "QueryCorpusRequest", "QueryCorpusResponse", "QueryDocumentRequest", "QueryDocumentResponse", "RelevantChunk", + "RetrievalMetadata", "RetrieverServiceClient", "SafetyFeedback", "SafetyRating", "SafetySetting", "Schema", + "SearchEntryPoint", + "Segment", "SemanticRetrieverConfig", "StringList", "TaskType", diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_metadata.json b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_metadata.json index 24a3b2565007..7fd1909f6ca0 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_metadata.json +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_metadata.json @@ -569,6 +569,40 @@ } } }, + "PredictionService": { + "clients": { + "grpc": { + "libraryClient": "PredictionServiceClient", + "rpcs": { + "Predict": { + "methods": [ + "predict" + ] + } + } + }, + "grpc-async": { + "libraryClient": "PredictionServiceAsyncClient", + "rpcs": { + "Predict": { + "methods": [ + "predict" + ] + } + } + }, + "rest": { + "libraryClient": "PredictionServiceClient", + "rpcs": { + "Predict": { + "methods": [ + "predict" + ] + } + } + } + } + }, "RetrieverService": { "clients": { "grpc": { diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py 
b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py index 1699c98da708..8ebdaa033b52 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.6.9" # {x-release-please-version} +__version__ = "0.6.10" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/__init__.py new file mode 100644 index 000000000000..6c64cf5ad1c0 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import PredictionServiceAsyncClient +from .client import PredictionServiceClient + +__all__ = ( + "PredictionServiceClient", + "PredictionServiceAsyncClient", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/async_client.py new file mode 100644 index 000000000000..f9e04e3e2aea --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/async_client.py @@ -0,0 +1,391 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore + +from google.ai.generativelanguage_v1beta.types import prediction_service + +from .client import PredictionServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, PredictionServiceTransport +from .transports.grpc_asyncio import PredictionServiceGrpcAsyncIOTransport + + +class PredictionServiceAsyncClient: + """A service for online predictions and explanations.""" + + _client: PredictionServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = PredictionServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = PredictionServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = PredictionServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = PredictionServiceClient._DEFAULT_UNIVERSE + + model_path = staticmethod(PredictionServiceClient.model_path) + parse_model_path = staticmethod(PredictionServiceClient.parse_model_path) + common_billing_account_path = staticmethod( + PredictionServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + PredictionServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(PredictionServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + PredictionServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + PredictionServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + PredictionServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(PredictionServiceClient.common_project_path) + parse_common_project_path = staticmethod( + PredictionServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(PredictionServiceClient.common_location_path) + parse_common_location_path = staticmethod( + PredictionServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PredictionServiceAsyncClient: The constructed client. 
+ """ + return PredictionServiceClient.from_service_account_info.__func__(PredictionServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PredictionServiceAsyncClient: The constructed client. + """ + return PredictionServiceClient.from_service_account_file.__func__(PredictionServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. 
Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return PredictionServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> PredictionServiceTransport: + """Returns the transport used by the client instance. + + Returns: + PredictionServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = PredictionServiceClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + PredictionServiceTransport, + Callable[..., PredictionServiceTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the prediction service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Optional[Union[str,PredictionServiceTransport,Callable[..., PredictionServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the PredictionServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = PredictionServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def predict( + self, + request: Optional[Union[prediction_service.PredictRequest, dict]] = None, + *, + model: Optional[str] = None, + instances: Optional[MutableSequence[struct_pb2.Value]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> prediction_service.PredictResponse: + r"""Performs a prediction request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + async def sample_predict(): + # Create a client + client = generativelanguage_v1beta.PredictionServiceAsyncClient() + + # Initialize request argument(s) + instances = generativelanguage_v1beta.Value() + instances.null_value = "NULL_VALUE" + + request = generativelanguage_v1beta.PredictRequest( + model="model_value", + instances=instances, + ) + + # Make the request + response = await client.predict(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1beta.types.PredictRequest, dict]]): + The request object. Request message for + [PredictionService.Predict][google.ai.generativelanguage.v1beta.PredictionService.Predict]. + model (:class:`str`): + Required. The name of the model for prediction. 
Format: + ``name=models/{model}``. + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instances (:class:`MutableSequence[google.protobuf.struct_pb2.Value]`): + Required. The instances that are the + input to the prediction call. + + This corresponds to the ``instances`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.PredictResponse: + Response message for [PredictionService.Predict]. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, instances]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, prediction_service.PredictRequest): + request = prediction_service.PredictRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if instances: + request.instances.extend(instances) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.predict] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "PredictionServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("PredictionServiceAsyncClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/client.py new file mode 100644 index 000000000000..48736239098d --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/client.py @@ -0,0 +1,814 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore + +from google.ai.generativelanguage_v1beta.types import prediction_service + +from .transports.base import DEFAULT_CLIENT_INFO, PredictionServiceTransport +from .transports.grpc import PredictionServiceGrpcTransport +from .transports.grpc_asyncio import PredictionServiceGrpcAsyncIOTransport +from .transports.rest import PredictionServiceRestTransport + + +class PredictionServiceClientMeta(type): + """Metaclass for the PredictionService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[PredictionServiceTransport]] + _transport_registry["grpc"] = PredictionServiceGrpcTransport + _transport_registry["grpc_asyncio"] = PredictionServiceGrpcAsyncIOTransport + _transport_registry["rest"] = PredictionServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[PredictionServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class PredictionServiceClient(metaclass=PredictionServiceClientMeta): + """A service for online predictions and explanations.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = "generativelanguage.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "generativelanguage.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PredictionServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PredictionServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> PredictionServiceTransport: + """Returns the transport used by the client instance. + + Returns: + PredictionServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def model_path( + model: str, + ) -> str: + """Returns a fully-qualified model string.""" + return "models/{model}".format( + model=model, + ) + + @staticmethod + def parse_model_path(path: str) -> Dict[str, str]: + """Parses a model path into its component segments.""" + m = re.match(r"^models/(?P<model>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P<folder>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P<organization>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse 
a project path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. 
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. 
+ google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. 
+ use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = PredictionServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = PredictionServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = PredictionServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = PredictionServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. 
+ + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = PredictionServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or PredictionServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. 
+ """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + PredictionServiceTransport, + Callable[..., PredictionServiceTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the prediction service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,PredictionServiceTransport,Callable[..., PredictionServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the PredictionServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. 
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = PredictionServiceClient._read_environment_variables() + self._client_cert_source = PredictionServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = PredictionServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. 
+ self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, PredictionServiceTransport) + if transport_provided: + # transport is a PredictionServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = cast(PredictionServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or PredictionServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[PredictionServiceTransport], + Callable[..., PredictionServiceTransport], + ] = ( + PredictionServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., PredictionServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + 
scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def predict( + self, + request: Optional[Union[prediction_service.PredictRequest, dict]] = None, + *, + model: Optional[str] = None, + instances: Optional[MutableSequence[struct_pb2.Value]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> prediction_service.PredictResponse: + r"""Performs a prediction request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1beta + + def sample_predict(): + # Create a client + client = generativelanguage_v1beta.PredictionServiceClient() + + # Initialize request argument(s) + instances = generativelanguage_v1beta.Value() + instances.null_value = "NULL_VALUE" + + request = generativelanguage_v1beta.PredictRequest( + model="model_value", + instances=instances, + ) + + # Make the request + response = client.predict(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1beta.types.PredictRequest, dict]): + The request object. Request message for + [PredictionService.Predict][google.ai.generativelanguage.v1beta.PredictionService.Predict]. + model (str): + Required. The name of the model for prediction. Format: + ``name=models/{model}``. 
+ + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instances (MutableSequence[google.protobuf.struct_pb2.Value]): + Required. The instances that are the + input to the prediction call. + + This corresponds to the ``instances`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.ai.generativelanguage_v1beta.types.PredictResponse: + Response message for [PredictionService.Predict]. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, instances]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, prediction_service.PredictRequest): + request = prediction_service.PredictRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if instances is not None: + request.instances.extend(instances) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.predict] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "PredictionServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("PredictionServiceClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/__init__.py new file mode 100644 index 000000000000..d6d645ba1ff1 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import PredictionServiceTransport +from .grpc import PredictionServiceGrpcTransport +from .grpc_asyncio import PredictionServiceGrpcAsyncIOTransport +from .rest import PredictionServiceRestInterceptor, PredictionServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[PredictionServiceTransport]] +_transport_registry["grpc"] = PredictionServiceGrpcTransport +_transport_registry["grpc_asyncio"] = PredictionServiceGrpcAsyncIOTransport +_transport_registry["rest"] = PredictionServiceRestTransport + +__all__ = ( + "PredictionServiceTransport", + "PredictionServiceGrpcTransport", + "PredictionServiceGrpcAsyncIOTransport", + "PredictionServiceRestTransport", + "PredictionServiceRestInterceptor", +) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/transports/base.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/base.py similarity index 81% rename from packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/transports/base.py rename to packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/base.py index 48a3880b9b11..1b36658ad423 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/label_service/transports/base.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/base.py @@ -25,20 +25,20 @@ from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore -from google.ads.admanager_v1 import gapic_version as package_version -from google.ads.admanager_v1.types import label_service +from google.ai.generativelanguage_v1beta import gapic_version as package_version +from 
google.ai.generativelanguage_v1beta.types import prediction_service DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ ) -class LabelServiceTransport(abc.ABC): - """Abstract transport class for LabelService.""" +class PredictionServiceTransport(abc.ABC): + """Abstract transport class for PredictionService.""" AUTH_SCOPES = () - DEFAULT_HOST: str = "admanager.googleapis.com" + DEFAULT_HOST: str = "generativelanguage.googleapis.com" def __init__( self, @@ -57,7 +57,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to (default: 'admanager.googleapis.com'). + The hostname to connect to (default: 'generativelanguage.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -129,13 +129,8 @@ def host(self): def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { - self.get_label: gapic_v1.method.wrap_method( - self.get_label, - default_timeout=None, - client_info=client_info, - ), - self.list_labels: gapic_v1.method.wrap_method( - self.list_labels, + self.predict: gapic_v1.method.wrap_method( + self.predict, default_timeout=None, client_info=client_info, ), @@ -151,38 +146,20 @@ def close(self): raise NotImplementedError() @property - def get_label( + def predict( self, ) -> Callable[ - [label_service.GetLabelRequest], - Union[label_service.Label, Awaitable[label_service.Label]], - ]: - raise NotImplementedError() - - @property - def list_labels( - self, - ) -> Callable[ - [label_service.ListLabelsRequest], + [prediction_service.PredictRequest], Union[ - label_service.ListLabelsResponse, - Awaitable[label_service.ListLabelsResponse], + prediction_service.PredictResponse, + Awaitable[prediction_service.PredictResponse], ], ]: raise NotImplementedError() - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - @property def kind(self) -> str: raise NotImplementedError() -__all__ = ("LabelServiceTransport",) +__all__ = ("PredictionServiceTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/grpc.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/grpc.py new file mode 100644 index 000000000000..285c2ff8af46 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/grpc.py @@ -0,0 +1,274 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.ai.generativelanguage_v1beta.types import prediction_service + +from .base import DEFAULT_CLIENT_INFO, PredictionServiceTransport + + +class PredictionServiceGrpcTransport(PredictionServiceTransport): + """gRPC backend transport for PredictionService. + + A service for online predictions and explanations. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. 
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def predict( + self, + ) -> Callable[ + [prediction_service.PredictRequest], prediction_service.PredictResponse + ]: + r"""Return a callable for the predict method over gRPC. + + Performs a prediction request. + + Returns: + Callable[[~.PredictRequest], + ~.PredictResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "predict" not in self._stubs: + self._stubs["predict"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.PredictionService/Predict", + request_serializer=prediction_service.PredictRequest.serialize, + response_deserializer=prediction_service.PredictResponse.deserialize, + ) + return self._stubs["predict"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("PredictionServiceGrpcTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/grpc_asyncio.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..1348f51f6706 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/grpc_asyncio.py @@ -0,0 +1,285 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 
Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.ai.generativelanguage_v1beta.types import prediction_service + +from .base import DEFAULT_CLIENT_INFO, PredictionServiceTransport +from .grpc import PredictionServiceGrpcTransport + + +class PredictionServiceGrpcAsyncIOTransport(PredictionServiceTransport): + """gRPC AsyncIO backend transport for PredictionService. + + A service for online predictions and explanations. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. 
+ channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the 
credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def predict( + self, + ) -> Callable[ + [prediction_service.PredictRequest], + Awaitable[prediction_service.PredictResponse], + ]: + r"""Return a callable for the predict method over gRPC. + + Performs a prediction request. + + Returns: + Callable[[~.PredictRequest], + Awaitable[~.PredictResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "predict" not in self._stubs: + self._stubs["predict"] = self.grpc_channel.unary_unary( + "/google.ai.generativelanguage.v1beta.PredictionService/Predict", + request_serializer=prediction_service.PredictRequest.serialize, + response_deserializer=prediction_service.PredictResponse.deserialize, + ) + return self._stubs["predict"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.predict: gapic_v1.method_async.wrap_method( + self.predict, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("PredictionServiceGrpcAsyncIOTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/rest.py new file mode 100644 index 000000000000..0fd462caa988 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/rest.py @@ -0,0 +1,313 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.ai.generativelanguage_v1beta.types import prediction_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import PredictionServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class PredictionServiceRestInterceptor: + """Interceptor for PredictionService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the PredictionServiceRestTransport. + + .. 
code-block:: python + class MyCustomPredictionServiceInterceptor(PredictionServiceRestInterceptor): + def pre_predict(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_predict(self, response): + logging.log(f"Received response: {response}") + return response + + transport = PredictionServiceRestTransport(interceptor=MyCustomPredictionServiceInterceptor()) + client = PredictionServiceClient(transport=transport) + + + """ + + def pre_predict( + self, + request: prediction_service.PredictRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[prediction_service.PredictRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for predict + + Override in a subclass to manipulate the request or metadata + before they are sent to the PredictionService server. + """ + return request, metadata + + def post_predict( + self, response: prediction_service.PredictResponse + ) -> prediction_service.PredictResponse: + """Post-rpc interceptor for predict + + Override in a subclass to manipulate the response + after it is returned by the PredictionService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class PredictionServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: PredictionServiceRestInterceptor + + +class PredictionServiceRestTransport(PredictionServiceTransport): + """REST backend transport for PredictionService. + + A service for online predictions and explanations. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[PredictionServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or PredictionServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Predict(PredictionServiceRestStub): + def __hash__(self): + return hash("Predict") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: prediction_service.PredictRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> prediction_service.PredictResponse: + r"""Call the predict method over HTTP. + + Args: + request (~.prediction_service.PredictRequest): + The request object. 
Request message for + [PredictionService.Predict][google.ai.generativelanguage.v1beta.PredictionService.Predict]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.prediction_service.PredictResponse: + Response message for [PredictionService.Predict]. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{model=models/*}:predict", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_predict(request, metadata) + pb_request = prediction_service.PredictRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = prediction_service.PredictResponse() + pb_resp = prediction_service.PredictResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_predict(resp) + return resp + + @property + def predict( + self, + ) -> Callable[ + [prediction_service.PredictRequest], prediction_service.PredictResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Predict(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("PredictionServiceRestTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/__init__.py index 89b4f8ad01b8..9dd7a564142d 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/__init__.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/__init__.py @@ -28,12 +28,14 @@ CodeExecution, CodeExecutionResult, Content, + DynamicRetrievalConfig, ExecutableCode, FileData, FunctionCall, FunctionCallingConfig, FunctionDeclaration, FunctionResponse, + GoogleSearchRetrieval, GroundingPassage, GroundingPassages, Part, @@ -76,6 +78,13 @@ GenerateContentResponse, GenerationConfig, GroundingAttribution, + GroundingChunk, + GroundingMetadata, + GroundingSupport, + LogprobsResult, + RetrievalMetadata, + SearchEntryPoint, + Segment, SemanticRetrieverConfig, TaskType, ) @@ -103,6 +112,7 @@ TransferOwnershipResponse, UpdatePermissionRequest, ) +from .prediction_service import PredictRequest, PredictResponse from .retriever import ( Chunk, ChunkData, @@ 
-188,12 +198,14 @@ "CodeExecution", "CodeExecutionResult", "Content", + "DynamicRetrievalConfig", "ExecutableCode", "FileData", "FunctionCall", "FunctionCallingConfig", "FunctionDeclaration", "FunctionResponse", + "GoogleSearchRetrieval", "GroundingPassage", "GroundingPassages", "Part", @@ -231,6 +243,13 @@ "GenerateContentResponse", "GenerationConfig", "GroundingAttribution", + "GroundingChunk", + "GroundingMetadata", + "GroundingSupport", + "LogprobsResult", + "RetrievalMetadata", + "SearchEntryPoint", + "Segment", "SemanticRetrieverConfig", "TaskType", "Model", @@ -253,6 +272,8 @@ "TransferOwnershipRequest", "TransferOwnershipResponse", "UpdatePermissionRequest", + "PredictRequest", + "PredictResponse", "Chunk", "ChunkData", "Condition", diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/content.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/content.py index bbdbf7f24bc8..6b5d37cd15ce 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/content.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/content.py @@ -31,6 +31,8 @@ "ExecutableCode", "CodeExecutionResult", "Tool", + "GoogleSearchRetrieval", + "DynamicRetrievalConfig", "CodeExecution", "ToolConfig", "FunctionCallingConfig", @@ -354,14 +356,18 @@ class Tool(proto.Message): The model or system does not execute the function. Instead the defined function may be returned as a - [FunctionCall][content.part.function_call] with arguments to - the client side for execution. The model may decide to call - a subset of these functions by populating - [FunctionCall][content.part.function_call] in the response. - The next conversation turn may contain a - [FunctionResponse][content.part.function_response] with the - [content.role] "function" generation context for the next - model turn. 
+ [FunctionCall][google.ai.generativelanguage.v1beta.Part.function_call] + with arguments to the client side for execution. The model + may decide to call a subset of these functions by populating + [FunctionCall][google.ai.generativelanguage.v1beta.Part.function_call] + in the response. The next conversation turn may contain a + [FunctionResponse][google.ai.generativelanguage.v1beta.Part.function_response] + with the + [Content.role][google.ai.generativelanguage.v1beta.Content.role] + "function" generation context for the next model turn. + google_search_retrieval (google.ai.generativelanguage_v1beta.types.GoogleSearchRetrieval): + Optional. Retrieval tool that is powered by + Google search. code_execution (google.ai.generativelanguage_v1beta.types.CodeExecution): Optional. Enables the model to execute code as part of generation. @@ -372,6 +378,11 @@ class Tool(proto.Message): number=1, message="FunctionDeclaration", ) + google_search_retrieval: "GoogleSearchRetrieval" = proto.Field( + proto.MESSAGE, + number=2, + message="GoogleSearchRetrieval", + ) code_execution: "CodeExecution" = proto.Field( proto.MESSAGE, number=3, @@ -379,6 +390,65 @@ class Tool(proto.Message): ) +class GoogleSearchRetrieval(proto.Message): + r"""Tool to retrieve public web data for grounding, powered by + Google. + + Attributes: + dynamic_retrieval_config (google.ai.generativelanguage_v1beta.types.DynamicRetrievalConfig): + Specifies the dynamic retrieval configuration + for the given source. + """ + + dynamic_retrieval_config: "DynamicRetrievalConfig" = proto.Field( + proto.MESSAGE, + number=1, + message="DynamicRetrievalConfig", + ) + + +class DynamicRetrievalConfig(proto.Message): + r"""Describes the options to customize dynamic retrieval. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + mode (google.ai.generativelanguage_v1beta.types.DynamicRetrievalConfig.Mode): + The mode of the predictor to be used in + dynamic retrieval. + dynamic_threshold (float): + The threshold to be used in dynamic + retrieval. If not set, a system default value is + used. + + This field is a member of `oneof`_ ``_dynamic_threshold``. + """ + + class Mode(proto.Enum): + r"""The mode of the predictor to be used in dynamic retrieval. + + Values: + MODE_UNSPECIFIED (0): + Always trigger retrieval. + MODE_DYNAMIC (1): + Run retrieval only when system decides it is + necessary. + """ + MODE_UNSPECIFIED = 0 + MODE_DYNAMIC = 1 + + mode: Mode = proto.Field( + proto.ENUM, + number=1, + enum=Mode, + ) + dynamic_threshold: float = proto.Field( + proto.FLOAT, + number=2, + optional=True, + ) + + class CodeExecution(proto.Message): r"""Tool that executes code generated by the model, and automatically returns the result to the model. @@ -608,6 +678,9 @@ class Schema(proto.Message): max_items (int): Optional. Maximum number of the elements for Type.ARRAY. + min_items (int): + Optional. Minimum number of the elements for + Type.ARRAY. properties (MutableMapping[str, google.ai.generativelanguage_v1beta.types.Schema]): Optional. Properties of Type.OBJECT. 
required (MutableSequence[str]): @@ -645,6 +718,10 @@ class Schema(proto.Message): proto.INT64, number=21, ) + min_items: int = proto.Field( + proto.INT64, + number=22, + ) properties: MutableMapping[str, "Schema"] = proto.MapField( proto.STRING, proto.MESSAGE, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/generative_service.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/generative_service.py index b31b07aa4299..edc4c8ec0ff8 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/generative_service.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/generative_service.py @@ -32,8 +32,15 @@ "SemanticRetrieverConfig", "GenerateContentResponse", "Candidate", + "LogprobsResult", "AttributionSourceId", "GroundingAttribution", + "RetrievalMetadata", + "GroundingMetadata", + "SearchEntryPoint", + "GroundingChunk", + "Segment", + "GroundingSupport", "GenerateAnswerRequest", "GenerateAnswerResponse", "EmbedContentRequest", @@ -289,7 +296,8 @@ class GenerationConfig(proto.Message): Optional. MIME type of the generated candidate text. Supported MIME types are: ``text/plain``: (default) Text output. ``application/json``: JSON response in the response - candidates. Refer to the + candidates. ``text/x.enum``: ENUM as a string response in + the response candidates. Refer to the `docs `__ for a list of all supported text MIME types. response_schema (google.ai.generativelanguage_v1beta.types.Schema): @@ -303,6 +311,58 @@ class GenerationConfig(proto.Message): JSON response. Refer to the `JSON text generation guide `__ for more details. + presence_penalty (float): + Optional. Presence penalty applied to the next token's + logprobs if the token has already been seen in the response. + + This penalty is binary on/off and not dependent on the + number of times the token is used (after the first). 
Use + [frequency_penalty][google.ai.generativelanguage.v1beta.GenerationConfig.frequency_penalty] + for a penalty that increases with each use. + + A positive penalty will discourage the use of tokens that + have already been used in the response, increasing the + vocabulary. + + A negative penalty will encourage the use of tokens that + have already been used in the response, decreasing the + vocabulary. + + This field is a member of `oneof`_ ``_presence_penalty``. + frequency_penalty (float): + Optional. Frequency penalty applied to the next token's + logprobs, multiplied by the number of times each token has + been seen in the response so far. + + A positive penalty will discourage the use of tokens that + have already been used, proportional to the number of times + the token has been used: The more a token is used, the more + difficult it is for the model to use that token again + increasing the vocabulary of responses. + + Caution: A *negative* penalty will encourage the model to + reuse tokens proportional to the number of times the token + has been used. Small negative values will reduce the + vocabulary of a response. Larger negative values will cause + the model to start repeating a common token until it hits + the + [max_output_tokens][google.ai.generativelanguage.v1beta.GenerationConfig.max_output_tokens] + limit: "...the the the the the...". + + This field is a member of `oneof`_ ``_frequency_penalty``. + response_logprobs (bool): + Optional. If true, export the logprobs + results in response. + + This field is a member of `oneof`_ ``_response_logprobs``. + logprobs (int): + Optional. Only valid if + [response_logprobs=True][google.ai.generativelanguage.v1beta.GenerationConfig.response_logprobs]. + This sets the number of top logprobs to return at each + decoding step in the + [Candidate.logprobs_result][google.ai.generativelanguage.v1beta.Candidate.logprobs_result]. + + This field is a member of `oneof`_ ``_logprobs``. 
""" candidate_count: int = proto.Field( @@ -343,6 +403,26 @@ class GenerationConfig(proto.Message): number=14, message=gag_content.Schema, ) + presence_penalty: float = proto.Field( + proto.FLOAT, + number=15, + optional=True, + ) + frequency_penalty: float = proto.Field( + proto.FLOAT, + number=16, + optional=True, + ) + response_logprobs: bool = proto.Field( + proto.BOOL, + number=17, + optional=True, + ) + logprobs: int = proto.Field( + proto.INT32, + number=18, + optional=True, + ) class SemanticRetrieverConfig(proto.Message): @@ -565,6 +645,15 @@ class Candidate(proto.Message): contributed to a grounded answer. This field is populated for ``GenerateAnswer`` calls. + grounding_metadata (google.ai.generativelanguage_v1beta.types.GroundingMetadata): + Output only. Grounding metadata for the candidate. + + This field is populated for ``GenerateContent`` calls. + avg_logprobs (float): + Output only. + logprobs_result (google.ai.generativelanguage_v1beta.types.LogprobsResult): + Output only. Log-likelihood scores for the + response tokens and top tokens """ class FinishReason(proto.Enum): @@ -652,6 +741,94 @@ class FinishReason(proto.Enum): number=8, message="GroundingAttribution", ) + grounding_metadata: "GroundingMetadata" = proto.Field( + proto.MESSAGE, + number=9, + message="GroundingMetadata", + ) + avg_logprobs: float = proto.Field( + proto.DOUBLE, + number=10, + ) + logprobs_result: "LogprobsResult" = proto.Field( + proto.MESSAGE, + number=11, + message="LogprobsResult", + ) + + +class LogprobsResult(proto.Message): + r"""Logprobs Result + + Attributes: + top_candidates (MutableSequence[google.ai.generativelanguage_v1beta.types.LogprobsResult.TopCandidates]): + Length = total number of decoding steps. + chosen_candidates (MutableSequence[google.ai.generativelanguage_v1beta.types.LogprobsResult.Candidate]): + Length = total number of decoding steps. The chosen + candidates may or may not be in top_candidates. 
+ """ + + class Candidate(proto.Message): + r"""Candidate for the logprobs token and score. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + token (str): + The candidate’s token string value. + + This field is a member of `oneof`_ ``_token``. + token_id (int): + The candidate’s token id value. + + This field is a member of `oneof`_ ``_token_id``. + log_probability (float): + The candidate's log probability. + + This field is a member of `oneof`_ ``_log_probability``. + """ + + token: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + token_id: int = proto.Field( + proto.INT32, + number=3, + optional=True, + ) + log_probability: float = proto.Field( + proto.FLOAT, + number=2, + optional=True, + ) + + class TopCandidates(proto.Message): + r"""Candidates with top log probabilities at each decoding step. + + Attributes: + candidates (MutableSequence[google.ai.generativelanguage_v1beta.types.LogprobsResult.Candidate]): + Sorted by log probability in descending + order. + """ + + candidates: MutableSequence["LogprobsResult.Candidate"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="LogprobsResult.Candidate", + ) + + top_candidates: MutableSequence[TopCandidates] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=TopCandidates, + ) + chosen_candidates: MutableSequence[Candidate] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=Candidate, + ) class AttributionSourceId(proto.Message): @@ -758,6 +935,219 @@ class GroundingAttribution(proto.Message): ) +class RetrievalMetadata(proto.Message): + r"""Metadata related to retrieval in the grounding flow. + + Attributes: + google_search_dynamic_retrieval_score (float): + Optional. Score indicating how likely information from + google search could help answer the prompt. The score is in + the range [0, 1], where 0 is the least likely and 1 is the + most likely. 
This score is only populated when google search + grounding and dynamic retrieval is enabled. It will be + compared to the threshold to determine whether to trigger + google search. + """ + + google_search_dynamic_retrieval_score: float = proto.Field( + proto.FLOAT, + number=2, + ) + + +class GroundingMetadata(proto.Message): + r"""Metadata returned to client when grounding is enabled. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + search_entry_point (google.ai.generativelanguage_v1beta.types.SearchEntryPoint): + Optional. Google search entry for the + following-up web searches. + + This field is a member of `oneof`_ ``_search_entry_point``. + grounding_chunks (MutableSequence[google.ai.generativelanguage_v1beta.types.GroundingChunk]): + List of supporting references retrieved from + specified grounding source. + grounding_supports (MutableSequence[google.ai.generativelanguage_v1beta.types.GroundingSupport]): + List of grounding support. + retrieval_metadata (google.ai.generativelanguage_v1beta.types.RetrievalMetadata): + Metadata related to retrieval in the + grounding flow. + + This field is a member of `oneof`_ ``_retrieval_metadata``. + """ + + search_entry_point: "SearchEntryPoint" = proto.Field( + proto.MESSAGE, + number=1, + optional=True, + message="SearchEntryPoint", + ) + grounding_chunks: MutableSequence["GroundingChunk"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="GroundingChunk", + ) + grounding_supports: MutableSequence["GroundingSupport"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="GroundingSupport", + ) + retrieval_metadata: "RetrievalMetadata" = proto.Field( + proto.MESSAGE, + number=4, + optional=True, + message="RetrievalMetadata", + ) + + +class SearchEntryPoint(proto.Message): + r"""Google search entry point. + + Attributes: + rendered_content (str): + Optional. 
Web content snippet that can be + embedded in a web page or an app webview. + sdk_blob (bytes): + Optional. Base64 encoded JSON representing + array of tuple. + """ + + rendered_content: str = proto.Field( + proto.STRING, + number=1, + ) + sdk_blob: bytes = proto.Field( + proto.BYTES, + number=2, + ) + + +class GroundingChunk(proto.Message): + r"""Grounding chunk. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + web (google.ai.generativelanguage_v1beta.types.GroundingChunk.Web): + Grounding chunk from the web. + + This field is a member of `oneof`_ ``chunk_type``. + """ + + class Web(proto.Message): + r"""Chunk from the web. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + uri (str): + URI reference of the chunk. + + This field is a member of `oneof`_ ``_uri``. + title (str): + Title of the chunk. + + This field is a member of `oneof`_ ``_title``. + """ + + uri: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + title: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + + web: Web = proto.Field( + proto.MESSAGE, + number=1, + oneof="chunk_type", + message=Web, + ) + + +class Segment(proto.Message): + r"""Segment of the content. + + Attributes: + part_index (int): + Output only. The index of a Part object + within its parent Content object. + start_index (int): + Output only. Start index in the given Part, + measured in bytes. Offset from the start of the + Part, inclusive, starting at zero. + end_index (int): + Output only. End index in the given Part, + measured in bytes. Offset from the start of the + Part, exclusive, starting at zero. + text (str): + Output only. The text corresponding to the + segment from the response. 
+ """ + + part_index: int = proto.Field( + proto.INT32, + number=1, + ) + start_index: int = proto.Field( + proto.INT32, + number=2, + ) + end_index: int = proto.Field( + proto.INT32, + number=3, + ) + text: str = proto.Field( + proto.STRING, + number=4, + ) + + +class GroundingSupport(proto.Message): + r"""Grounding support. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + segment (google.ai.generativelanguage_v1beta.types.Segment): + Segment of the content this support belongs + to. + + This field is a member of `oneof`_ ``_segment``. + grounding_chunk_indices (MutableSequence[int]): + A list of indices (into 'grounding_chunk') specifying the + citations associated with the claim. For instance [1,3,4] + means that grounding_chunk[1], grounding_chunk[3], + grounding_chunk[4] are the retrieved content attributed to + the claim. + confidence_scores (MutableSequence[float]): + Confidence score of the support references. Ranges from 0 to + 1. 1 is the most confident. This list must have the same + size as the grounding_chunk_indices. + """ + + segment: "Segment" = proto.Field( + proto.MESSAGE, + number=1, + optional=True, + message="Segment", + ) + grounding_chunk_indices: MutableSequence[int] = proto.RepeatedField( + proto.INT32, + number=2, + ) + confidence_scores: MutableSequence[float] = proto.RepeatedField( + proto.FLOAT, + number=3, + ) + + class GenerateAnswerRequest(proto.Message): r"""Request to generate a grounded answer from the ``Model``. @@ -1207,7 +1597,7 @@ class CountTokensResponse(proto.Message): ``prompt`` into. Always non-negative. cached_content_token_count (int): Number of tokens in the cached part of the - prompt, i.e. in the cached content. + prompt (the cached content). 
""" total_tokens: int = proto.Field( diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/prediction_service.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/prediction_service.py new file mode 100644 index 000000000000..b6a659782edf --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/prediction_service.py @@ -0,0 +1,79 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import struct_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ai.generativelanguage.v1beta", + manifest={ + "PredictRequest", + "PredictResponse", + }, +) + + +class PredictRequest(proto.Message): + r"""Request message for + [PredictionService.Predict][google.ai.generativelanguage.v1beta.PredictionService.Predict]. + + Attributes: + model (str): + Required. The name of the model for prediction. Format: + ``name=models/{model}``. + instances (MutableSequence[google.protobuf.struct_pb2.Value]): + Required. The instances that are the input to + the prediction call. + parameters (google.protobuf.struct_pb2.Value): + Optional. The parameters that govern the + prediction call. 
+ """ + + model: str = proto.Field( + proto.STRING, + number=1, + ) + instances: MutableSequence[struct_pb2.Value] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=struct_pb2.Value, + ) + parameters: struct_pb2.Value = proto.Field( + proto.MESSAGE, + number=3, + message=struct_pb2.Value, + ) + + +class PredictResponse(proto.Message): + r"""Response message for [PredictionService.Predict]. + + Attributes: + predictions (MutableSequence[google.protobuf.struct_pb2.Value]): + The outputs of the prediction call. + """ + + predictions: MutableSequence[struct_pb2.Value] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=struct_pb2.Value, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/safety.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/safety.py index 113590701d4b..8ede1042a0ac 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/safety.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/safety.py @@ -41,31 +41,32 @@ class HarmCategory(proto.Enum): HARM_CATEGORY_UNSPECIFIED (0): Category is unspecified. HARM_CATEGORY_DEROGATORY (1): - Negative or harmful comments targeting - identity and/or protected attribute. + **PaLM** - Negative or harmful comments targeting identity + and/or protected attribute. HARM_CATEGORY_TOXICITY (2): - Content that is rude, disrespectful, or - profane. + **PaLM** - Content that is rude, disrespectful, or profane. HARM_CATEGORY_VIOLENCE (3): - Describes scenarios depicting violence - against an individual or group, or general - descriptions of gore. + **PaLM** - Describes scenarios depicting violence against an + individual or group, or general descriptions of gore. HARM_CATEGORY_SEXUAL (4): - Contains references to sexual acts or other - lewd content. 
+ **PaLM** - Contains references to sexual acts or other lewd + content. HARM_CATEGORY_MEDICAL (5): - Promotes unchecked medical advice. + **PaLM** - Promotes unchecked medical advice. HARM_CATEGORY_DANGEROUS (6): - Dangerous content that promotes, facilitates, - or encourages harmful acts. + **PaLM** - Dangerous content that promotes, facilitates, or + encourages harmful acts. HARM_CATEGORY_HARASSMENT (7): - Harasment content. + **Gemini** - Harassment content. HARM_CATEGORY_HATE_SPEECH (8): - Hate speech and content. + **Gemini** - Hate speech and content. HARM_CATEGORY_SEXUALLY_EXPLICIT (9): - Sexually explicit content. + **Gemini** - Sexually explicit content. HARM_CATEGORY_DANGEROUS_CONTENT (10): - Dangerous content. + **Gemini** - Dangerous content. + HARM_CATEGORY_CIVIC_INTEGRITY (11): + **Gemini** - Content that may be used to harm civic + integrity. """ HARM_CATEGORY_UNSPECIFIED = 0 HARM_CATEGORY_DEROGATORY = 1 @@ -78,6 +79,7 @@ class HarmCategory(proto.Enum): HARM_CATEGORY_HATE_SPEECH = 8 HARM_CATEGORY_SEXUALLY_EXPLICIT = 9 HARM_CATEGORY_DANGEROUS_CONTENT = 10 + HARM_CATEGORY_CIVIC_INTEGRITY = 11 class ContentFilter(proto.Message): @@ -249,12 +251,15 @@ class HarmBlockThreshold(proto.Enum): be allowed. BLOCK_NONE (4): All content will be allowed. + OFF (5): + Turn off the safety filter. 
""" HARM_BLOCK_THRESHOLD_UNSPECIFIED = 0 BLOCK_LOW_AND_ABOVE = 1 BLOCK_MEDIUM_AND_ABOVE = 2 BLOCK_ONLY_HIGH = 3 BLOCK_NONE = 4 + OFF = 5 category: "HarmCategory" = proto.Field( proto.ENUM, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/tuned_model.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/tuned_model.py index a45283f33632..3be0f7e6b586 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/tuned_model.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/tuned_model.py @@ -118,6 +118,9 @@ class TunedModel(proto.Message): tuning_task (google.ai.generativelanguage_v1beta.types.TuningTask): Required. The tuning task that creates the tuned model. + reader_project_numbers (MutableSequence[int]): + Optional. List of project numbers that have + read access to the tuned model. """ class State(proto.Enum): @@ -196,6 +199,10 @@ class State(proto.Enum): number=10, message="TuningTask", ) + reader_project_numbers: MutableSequence[int] = proto.RepeatedField( + proto.INT64, + number=14, + ) class TunedModelSource(proto.Message): diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py index 1699c98da708..8ebdaa033b52 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.6.9" # {x-release-please-version} +__version__ = "0.6.10" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py index 1699c98da708..8ebdaa033b52 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.6.9" # {x-release-please-version} +__version__ = "0.6.10" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_prediction_service_predict_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_prediction_service_predict_async.py new file mode 100644 index 000000000000..851ebfa44e4d --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_prediction_service_predict_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for Predict +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_PredictionService_Predict_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +async def sample_predict(): + # Create a client + client = generativelanguage_v1beta.PredictionServiceAsyncClient() + + # Initialize request argument(s) + instances = generativelanguage_v1beta.Value() + instances.null_value = "NULL_VALUE" + + request = generativelanguage_v1beta.PredictRequest( + model="model_value", + instances=instances, + ) + + # Make the request + response = await client.predict(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_PredictionService_Predict_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_prediction_service_predict_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_prediction_service_predict_sync.py new file mode 100644 index 000000000000..ade0be26d986 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1beta_generated_prediction_service_predict_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Predict +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1beta_generated_PredictionService_Predict_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1beta + + +def sample_predict(): + # Create a client + client = generativelanguage_v1beta.PredictionServiceClient() + + # Initialize request argument(s) + instances = generativelanguage_v1beta.Value() + instances.null_value = "NULL_VALUE" + + request = generativelanguage_v1beta.PredictRequest( + model="model_value", + instances=instances, + ) + + # Make the request + response = client.predict(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1beta_generated_PredictionService_Predict_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json index dcb6ad5e6a9e..416353581730 100644 --- a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ai-generativelanguage", - "version": "0.6.9" + "version": "0.6.10" }, "snippets": [ { diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json index a6e1502bce78..a2110fd118ef 100644 --- a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json @@ -8,7 
+8,7 @@ ], "language": "PYTHON", "name": "google-ai-generativelanguage", - "version": "0.6.9" + "version": "0.6.10" }, "snippets": [ { @@ -4953,6 +4953,175 @@ ], "title": "generativelanguage_v1beta_generated_permission_service_update_permission_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1beta.PredictionServiceAsyncClient", + "shortName": "PredictionServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.PredictionServiceAsyncClient.predict", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.PredictionService.Predict", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.PredictionService", + "shortName": "PredictionService" + }, + "shortName": "Predict" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.PredictRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "instances", + "type": "MutableSequence[google.protobuf.struct_pb2.Value]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.PredictResponse", + "shortName": "predict" + }, + "description": "Sample for Predict", + "file": "generativelanguage_v1beta_generated_prediction_service_predict_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_PredictionService_Predict_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_prediction_service_predict_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1beta.PredictionServiceClient", + "shortName": "PredictionServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1beta.PredictionServiceClient.predict", + "method": { + "fullName": "google.ai.generativelanguage.v1beta.PredictionService.Predict", + "service": { + "fullName": "google.ai.generativelanguage.v1beta.PredictionService", + "shortName": "PredictionService" + }, + "shortName": "Predict" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1beta.types.PredictRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "instances", + "type": "MutableSequence[google.protobuf.struct_pb2.Value]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.ai.generativelanguage_v1beta.types.PredictResponse", + "shortName": "predict" + }, + "description": "Sample for Predict", + "file": "generativelanguage_v1beta_generated_prediction_service_predict_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1beta_generated_PredictionService_Predict_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1beta_generated_prediction_service_predict_sync.py" + }, { "canonical": true, "clientMethod": { diff --git 
a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json index d3fc92d09eaa..865de14ffa13 100644 --- a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ai-generativelanguage", - "version": "0.6.9" + "version": "0.6.10" }, "snippets": [ { diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json index d9c470b9e07c..7fbde27c9197 100644 --- a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ai-generativelanguage", - "version": "0.6.9" + "version": "0.6.10" }, "snippets": [ { diff --git a/packages/google-ai-generativelanguage/scripts/fixup_generativelanguage_v1beta_keywords.py b/packages/google-ai-generativelanguage/scripts/fixup_generativelanguage_v1beta_keywords.py index dcb5cdfbb55c..8e69225c75de 100644 --- a/packages/google-ai-generativelanguage/scripts/fixup_generativelanguage_v1beta_keywords.py +++ b/packages/google-ai-generativelanguage/scripts/fixup_generativelanguage_v1beta_keywords.py @@ -83,6 +83,7 @@ class generativelanguageCallTransformer(cst.CSTTransformer): 'list_models': ('page_size', 'page_token', ), 'list_permissions': ('parent', 'page_size', 
'page_token', ), 'list_tuned_models': ('page_size', 'page_token', 'filter', ), + 'predict': ('model', 'instances', 'parameters', ), 'query_corpus': ('name', 'query', 'metadata_filters', 'results_count', ), 'query_document': ('name', 'query', 'results_count', 'metadata_filters', ), 'stream_generate_content': ('model', 'contents', 'system_instruction', 'tools', 'tool_config', 'safety_settings', 'generation_config', 'cached_content', ), diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_cache_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_cache_service.py index 46ecbbbaeafa..1cc7b01d0c19 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_cache_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_cache_service.py @@ -3349,11 +3349,15 @@ def test_create_cached_content_rest(request_type): "enum": ["enum_value1", "enum_value2"], "items": {}, "max_items": 967, + "min_items": 965, "properties": {}, "required": ["required_value1", "required_value2"], }, } ], + "google_search_retrieval": { + "dynamic_retrieval_config": {"mode": 1, "dynamic_threshold": 0.1809} + }, "code_execution": {}, } ], @@ -4097,11 +4101,15 @@ def test_update_cached_content_rest(request_type): "enum": ["enum_value1", "enum_value2"], "items": {}, "max_items": 967, + "min_items": 965, "properties": {}, "required": ["required_value1", "required_value2"], }, } ], + "google_search_retrieval": { + "dynamic_retrieval_config": {"mode": 1, "dynamic_threshold": 0.1809} + }, "code_execution": {}, } ], diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_model_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_model_service.py index c8b4aed2becb..93ee6b5f5eb4 100644 --- 
a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_model_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_model_service.py @@ -2059,6 +2059,7 @@ def test_get_tuned_model(request_type, transport: str = "grpc"): top_p=0.546, top_k=541, state=tuned_model.TunedModel.State.CREATING, + reader_project_numbers=[2340], base_model="base_model_value", ) response = client.get_tuned_model(request) @@ -2078,6 +2079,7 @@ def test_get_tuned_model(request_type, transport: str = "grpc"): assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) assert response.top_k == 541 assert response.state == tuned_model.TunedModel.State.CREATING + assert response.reader_project_numbers == [2340] def test_get_tuned_model_empty_call(): @@ -2183,6 +2185,7 @@ async def test_get_tuned_model_empty_call_async(): top_p=0.546, top_k=541, state=tuned_model.TunedModel.State.CREATING, + reader_project_numbers=[2340], ) ) response = await client.get_tuned_model() @@ -2258,6 +2261,7 @@ async def test_get_tuned_model_async( top_p=0.546, top_k=541, state=tuned_model.TunedModel.State.CREATING, + reader_project_numbers=[2340], ) ) response = await client.get_tuned_model(request) @@ -2277,6 +2281,7 @@ async def test_get_tuned_model_async( assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) assert response.top_k == 541 assert response.state == tuned_model.TunedModel.State.CREATING + assert response.reader_project_numbers == [2340] @pytest.mark.asyncio @@ -3346,6 +3351,7 @@ def test_update_tuned_model(request_type, transport: str = "grpc"): top_p=0.546, top_k=541, state=gag_tuned_model.TunedModel.State.CREATING, + reader_project_numbers=[2340], base_model="base_model_value", ) response = client.update_tuned_model(request) @@ -3365,6 +3371,7 @@ def test_update_tuned_model(request_type, transport: str = "grpc"): assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) assert response.top_k == 541 assert response.state == 
gag_tuned_model.TunedModel.State.CREATING + assert response.reader_project_numbers == [2340] def test_update_tuned_model_empty_call(): @@ -3476,6 +3483,7 @@ async def test_update_tuned_model_empty_call_async(): top_p=0.546, top_k=541, state=gag_tuned_model.TunedModel.State.CREATING, + reader_project_numbers=[2340], ) ) response = await client.update_tuned_model() @@ -3553,6 +3561,7 @@ async def test_update_tuned_model_async( top_p=0.546, top_k=541, state=gag_tuned_model.TunedModel.State.CREATING, + reader_project_numbers=[2340], ) ) response = await client.update_tuned_model(request) @@ -3572,6 +3581,7 @@ async def test_update_tuned_model_async( assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) assert response.top_k == 541 assert response.state == gag_tuned_model.TunedModel.State.CREATING + assert response.reader_project_numbers == [2340] @pytest.mark.asyncio @@ -4757,6 +4767,7 @@ def test_get_tuned_model_rest(request_type): top_p=0.546, top_k=541, state=tuned_model.TunedModel.State.CREATING, + reader_project_numbers=[2340], base_model="base_model_value", ) @@ -4780,6 +4791,7 @@ def test_get_tuned_model_rest(request_type): assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) assert response.top_k == 541 assert response.state == tuned_model.TunedModel.State.CREATING + assert response.reader_project_numbers == [2340] def test_get_tuned_model_rest_use_cached_wrapped_rpc(): @@ -5372,6 +5384,7 @@ def test_create_tuned_model_rest(request_type): "batch_size": 1052, }, }, + "reader_project_numbers": [2341, 2342], } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -5784,6 +5797,7 @@ def test_update_tuned_model_rest(request_type): "batch_size": 1052, }, }, + "reader_project_numbers": [2341, 2342], } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -5865,6 +5879,7 @@ def get_message_fields(field): top_p=0.546, top_k=541, state=gag_tuned_model.TunedModel.State.CREATING, + reader_project_numbers=[2340], base_model="base_model_value", ) @@ -5888,6 +5903,7 @@ def get_message_fields(field): assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) assert response.top_k == 541 assert response.state == gag_tuned_model.TunedModel.State.CREATING + assert response.reader_project_numbers == [2340] def test_update_tuned_model_rest_use_cached_wrapped_rpc(): diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_creative_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_prediction_service.py similarity index 55% rename from packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_creative_service.py rename to packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_prediction_service.py index 1fae403e50f6..5532205c7f0a 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_creative_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_prediction_service.py @@ -36,7 +36,7 @@ from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import json_format -from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore import grpc from grpc.experimental import aio from proto.marshal.rules import wrappers @@ -45,12 +45,12 @@ from requests import PreparedRequest, Request, Response from requests.sessions import Session -from google.ads.admanager_v1.services.creative_service import ( - CreativeServiceClient, - pagers, +from google.ai.generativelanguage_v1beta.services.prediction_service import ( + PredictionServiceAsyncClient, + PredictionServiceClient, transports, ) 
-from google.ads.admanager_v1.types import ad_partner_declaration, creative_service +from google.ai.generativelanguage_v1beta.types import prediction_service def client_cert_source_callback(): @@ -86,40 +86,45 @@ def test__get_default_mtls_endpoint(): sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" non_googleapi = "api.example.com" - assert CreativeServiceClient._get_default_mtls_endpoint(None) is None + assert PredictionServiceClient._get_default_mtls_endpoint(None) is None assert ( - CreativeServiceClient._get_default_mtls_endpoint(api_endpoint) + PredictionServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint ) assert ( - CreativeServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + PredictionServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint ) assert ( - CreativeServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + PredictionServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint ) assert ( - CreativeServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + PredictionServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint ) assert ( - CreativeServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + PredictionServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi ) def test__read_environment_variables(): - assert CreativeServiceClient._read_environment_variables() == (False, "auto", None) + assert PredictionServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert CreativeServiceClient._read_environment_variables() == ( + assert PredictionServiceClient._read_environment_variables() == ( True, "auto", None, ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert CreativeServiceClient._read_environment_variables() == ( + assert 
PredictionServiceClient._read_environment_variables() == ( False, "auto", None, @@ -129,28 +134,28 @@ def test__read_environment_variables(): os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): with pytest.raises(ValueError) as excinfo: - CreativeServiceClient._read_environment_variables() + PredictionServiceClient._read_environment_variables() assert ( str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert CreativeServiceClient._read_environment_variables() == ( + assert PredictionServiceClient._read_environment_variables() == ( False, "never", None, ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert CreativeServiceClient._read_environment_variables() == ( + assert PredictionServiceClient._read_environment_variables() == ( False, "always", None, ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert CreativeServiceClient._read_environment_variables() == ( + assert PredictionServiceClient._read_environment_variables() == ( False, "auto", None, @@ -158,14 +163,14 @@ def test__read_environment_variables(): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: - CreativeServiceClient._read_environment_variables() + PredictionServiceClient._read_environment_variables() assert ( str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" ) with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert CreativeServiceClient._read_environment_variables() == ( + assert PredictionServiceClient._read_environment_variables() == ( False, "auto", "foo.com", @@ -176,13 +181,15 @@ def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() 
- assert CreativeServiceClient._get_client_cert_source(None, False) is None + assert PredictionServiceClient._get_client_cert_source(None, False) is None assert ( - CreativeServiceClient._get_client_cert_source(mock_provided_cert_source, False) + PredictionServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) is None ) assert ( - CreativeServiceClient._get_client_cert_source(mock_provided_cert_source, True) + PredictionServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source ) @@ -194,11 +201,11 @@ def test__get_client_cert_source(): return_value=mock_default_cert_source, ): assert ( - CreativeServiceClient._get_client_cert_source(None, True) + PredictionServiceClient._get_client_cert_source(None, True) is mock_default_cert_source ) assert ( - CreativeServiceClient._get_client_cert_source( + PredictionServiceClient._get_client_cert_source( mock_provided_cert_source, "true" ) is mock_provided_cert_source @@ -206,59 +213,66 @@ def test__get_client_cert_source(): @mock.patch.object( - CreativeServiceClient, + PredictionServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PredictionServiceClient), +) +@mock.patch.object( + PredictionServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(CreativeServiceClient), + modify_default_endpoint_template(PredictionServiceAsyncClient), ) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() - default_universe = CreativeServiceClient._DEFAULT_UNIVERSE - default_endpoint = CreativeServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + default_universe = PredictionServiceClient._DEFAULT_UNIVERSE + default_endpoint = PredictionServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( UNIVERSE_DOMAIN=default_universe ) mock_universe = "bar.com" - mock_endpoint = CreativeServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + mock_endpoint = 
PredictionServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( UNIVERSE_DOMAIN=mock_universe ) assert ( - CreativeServiceClient._get_api_endpoint( + PredictionServiceClient._get_api_endpoint( api_override, mock_client_cert_source, default_universe, "always" ) == api_override ) assert ( - CreativeServiceClient._get_api_endpoint( + PredictionServiceClient._get_api_endpoint( None, mock_client_cert_source, default_universe, "auto" ) - == CreativeServiceClient.DEFAULT_MTLS_ENDPOINT + == PredictionServiceClient.DEFAULT_MTLS_ENDPOINT ) assert ( - CreativeServiceClient._get_api_endpoint(None, None, default_universe, "auto") + PredictionServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint ) assert ( - CreativeServiceClient._get_api_endpoint(None, None, default_universe, "always") - == CreativeServiceClient.DEFAULT_MTLS_ENDPOINT + PredictionServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == PredictionServiceClient.DEFAULT_MTLS_ENDPOINT ) assert ( - CreativeServiceClient._get_api_endpoint( + PredictionServiceClient._get_api_endpoint( None, mock_client_cert_source, default_universe, "always" ) - == CreativeServiceClient.DEFAULT_MTLS_ENDPOINT + == PredictionServiceClient.DEFAULT_MTLS_ENDPOINT ) assert ( - CreativeServiceClient._get_api_endpoint(None, None, mock_universe, "never") + PredictionServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint ) assert ( - CreativeServiceClient._get_api_endpoint(None, None, default_universe, "never") + PredictionServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint ) with pytest.raises(MutualTLSChannelError) as excinfo: - CreativeServiceClient._get_api_endpoint( + PredictionServiceClient._get_api_endpoint( None, mock_client_cert_source, mock_universe, "auto" ) assert ( @@ -272,29 +286,30 @@ def test__get_universe_domain(): universe_domain_env = "bar.com" assert ( - CreativeServiceClient._get_universe_domain( + 
PredictionServiceClient._get_universe_domain( client_universe_domain, universe_domain_env ) == client_universe_domain ) assert ( - CreativeServiceClient._get_universe_domain(None, universe_domain_env) + PredictionServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env ) assert ( - CreativeServiceClient._get_universe_domain(None, None) - == CreativeServiceClient._DEFAULT_UNIVERSE + PredictionServiceClient._get_universe_domain(None, None) + == PredictionServiceClient._DEFAULT_UNIVERSE ) with pytest.raises(ValueError) as excinfo: - CreativeServiceClient._get_universe_domain("", None) + PredictionServiceClient._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (CreativeServiceClient, transports.CreativeServiceRestTransport, "rest"), + (PredictionServiceClient, transports.PredictionServiceGrpcTransport, "grpc"), + (PredictionServiceClient, transports.PredictionServiceRestTransport, "rest"), ], ) def test__validate_universe_domain(client_class, transport_class, transport_name): @@ -373,10 +388,12 @@ def test__validate_universe_domain(client_class, transport_class, transport_name @pytest.mark.parametrize( "client_class,transport_name", [ - (CreativeServiceClient, "rest"), + (PredictionServiceClient, "grpc"), + (PredictionServiceAsyncClient, "grpc_asyncio"), + (PredictionServiceClient, "rest"), ], ) -def test_creative_service_client_from_service_account_info( +def test_prediction_service_client_from_service_account_info( client_class, transport_name ): creds = ga_credentials.AnonymousCredentials() @@ -390,19 +407,21 @@ def test_creative_service_client_from_service_account_info( assert isinstance(client, client_class) assert client.transport._host == ( - "admanager.googleapis.com:443" + "generativelanguage.googleapis.com:443" if transport_name in ["grpc", "grpc_asyncio"] - else "https://admanager.googleapis.com" + 
else "https://generativelanguage.googleapis.com" ) @pytest.mark.parametrize( "transport_class,transport_name", [ - (transports.CreativeServiceRestTransport, "rest"), + (transports.PredictionServiceGrpcTransport, "grpc"), + (transports.PredictionServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.PredictionServiceRestTransport, "rest"), ], ) -def test_creative_service_client_service_account_always_use_jwt( +def test_prediction_service_client_service_account_always_use_jwt( transport_class, transport_name ): with mock.patch.object( @@ -423,10 +442,12 @@ def test_creative_service_client_service_account_always_use_jwt( @pytest.mark.parametrize( "client_class,transport_name", [ - (CreativeServiceClient, "rest"), + (PredictionServiceClient, "grpc"), + (PredictionServiceAsyncClient, "grpc_asyncio"), + (PredictionServiceClient, "rest"), ], ) -def test_creative_service_client_from_service_account_file( +def test_prediction_service_client_from_service_account_file( client_class, transport_name ): creds = ga_credentials.AnonymousCredentials() @@ -447,45 +468,57 @@ def test_creative_service_client_from_service_account_file( assert isinstance(client, client_class) assert client.transport._host == ( - "admanager.googleapis.com:443" + "generativelanguage.googleapis.com:443" if transport_name in ["grpc", "grpc_asyncio"] - else "https://admanager.googleapis.com" + else "https://generativelanguage.googleapis.com" ) -def test_creative_service_client_get_transport_class(): - transport = CreativeServiceClient.get_transport_class() +def test_prediction_service_client_get_transport_class(): + transport = PredictionServiceClient.get_transport_class() available_transports = [ - transports.CreativeServiceRestTransport, + transports.PredictionServiceGrpcTransport, + transports.PredictionServiceRestTransport, ] assert transport in available_transports - transport = CreativeServiceClient.get_transport_class("rest") - assert transport == transports.CreativeServiceRestTransport + 
transport = PredictionServiceClient.get_transport_class("grpc") + assert transport == transports.PredictionServiceGrpcTransport @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (CreativeServiceClient, transports.CreativeServiceRestTransport, "rest"), + (PredictionServiceClient, transports.PredictionServiceGrpcTransport, "grpc"), + ( + PredictionServiceAsyncClient, + transports.PredictionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (PredictionServiceClient, transports.PredictionServiceRestTransport, "rest"), ], ) @mock.patch.object( - CreativeServiceClient, + PredictionServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(CreativeServiceClient), + modify_default_endpoint_template(PredictionServiceClient), ) -def test_creative_service_client_client_options( +@mock.patch.object( + PredictionServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PredictionServiceAsyncClient), +) +def test_prediction_service_client_client_options( client_class, transport_class, transport_name ): # Check that if channel is provided we won't create a new one. - with mock.patch.object(CreativeServiceClient, "get_transport_class") as gtc: + with mock.patch.object(PredictionServiceClient, "get_transport_class") as gtc: transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() # Check that if channel is provided via str we will create a new one. 
- with mock.patch.object(CreativeServiceClient, "get_transport_class") as gtc: + with mock.patch.object(PredictionServiceClient, "get_transport_class") as gtc: client = client_class(transport=transport_name) gtc.assert_called() @@ -609,26 +642,55 @@ def test_creative_service_client_client_options( "client_class,transport_class,transport_name,use_client_cert_env", [ ( - CreativeServiceClient, - transports.CreativeServiceRestTransport, + PredictionServiceClient, + transports.PredictionServiceGrpcTransport, + "grpc", + "true", + ), + ( + PredictionServiceAsyncClient, + transports.PredictionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + PredictionServiceClient, + transports.PredictionServiceGrpcTransport, + "grpc", + "false", + ), + ( + PredictionServiceAsyncClient, + transports.PredictionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + PredictionServiceClient, + transports.PredictionServiceRestTransport, "rest", "true", ), ( - CreativeServiceClient, - transports.CreativeServiceRestTransport, + PredictionServiceClient, + transports.PredictionServiceRestTransport, "rest", "false", ), ], ) @mock.patch.object( - CreativeServiceClient, + PredictionServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PredictionServiceClient), +) +@mock.patch.object( + PredictionServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(CreativeServiceClient), + modify_default_endpoint_template(PredictionServiceAsyncClient), ) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_creative_service_client_mtls_env_auto( +def test_prediction_service_client_mtls_env_auto( client_class, transport_class, transport_name, use_client_cert_env ): # This tests the endpoint autoswitch behavior. 
Endpoint is autoswitched to the default @@ -730,13 +792,20 @@ def test_creative_service_client_mtls_env_auto( ) -@pytest.mark.parametrize("client_class", [CreativeServiceClient]) +@pytest.mark.parametrize( + "client_class", [PredictionServiceClient, PredictionServiceAsyncClient] +) @mock.patch.object( - CreativeServiceClient, + PredictionServiceClient, "DEFAULT_ENDPOINT", - modify_default_endpoint(CreativeServiceClient), + modify_default_endpoint(PredictionServiceClient), ) -def test_creative_service_client_get_mtls_endpoint_and_cert_source(client_class): +@mock.patch.object( + PredictionServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PredictionServiceAsyncClient), +) +def test_prediction_service_client_get_mtls_endpoint_and_cert_source(client_class): mock_client_cert_source = mock.Mock() # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". @@ -827,21 +896,28 @@ def test_creative_service_client_get_mtls_endpoint_and_cert_source(client_class) ) -@pytest.mark.parametrize("client_class", [CreativeServiceClient]) +@pytest.mark.parametrize( + "client_class", [PredictionServiceClient, PredictionServiceAsyncClient] +) +@mock.patch.object( + PredictionServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PredictionServiceClient), +) @mock.patch.object( - CreativeServiceClient, + PredictionServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(CreativeServiceClient), + modify_default_endpoint_template(PredictionServiceAsyncClient), ) -def test_creative_service_client_client_api_endpoint(client_class): +def test_prediction_service_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback api_override = "foo.com" - default_universe = CreativeServiceClient._DEFAULT_UNIVERSE - default_endpoint = CreativeServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + default_universe = PredictionServiceClient._DEFAULT_UNIVERSE + default_endpoint = 
PredictionServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( UNIVERSE_DOMAIN=default_universe ) mock_universe = "bar.com" - mock_endpoint = CreativeServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + mock_endpoint = PredictionServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( UNIVERSE_DOMAIN=mock_universe ) @@ -909,10 +985,16 @@ def test_creative_service_client_client_api_endpoint(client_class): @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (CreativeServiceClient, transports.CreativeServiceRestTransport, "rest"), + (PredictionServiceClient, transports.PredictionServiceGrpcTransport, "grpc"), + ( + PredictionServiceAsyncClient, + transports.PredictionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (PredictionServiceClient, transports.PredictionServiceRestTransport, "rest"), ], ) -def test_creative_service_client_client_options_scopes( +def test_prediction_service_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. @@ -940,10 +1022,27 @@ def test_creative_service_client_client_options_scopes( @pytest.mark.parametrize( "client_class,transport_class,transport_name,grpc_helpers", [ - (CreativeServiceClient, transports.CreativeServiceRestTransport, "rest", None), + ( + PredictionServiceClient, + transports.PredictionServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + PredictionServiceAsyncClient, + transports.PredictionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + PredictionServiceClient, + transports.PredictionServiceRestTransport, + "rest", + None, + ), ], ) -def test_creative_service_client_client_options_credentials_file( +def test_prediction_service_client_client_options_credentials_file( client_class, transport_class, transport_name, grpc_helpers ): # Check the case credentials file is provided. 
@@ -967,63 +1066,183 @@ def test_creative_service_client_client_options_credentials_file( ) +def test_prediction_service_client_client_options_from_dict(): + with mock.patch( + "google.ai.generativelanguage_v1beta.services.prediction_service.transports.PredictionServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = PredictionServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + PredictionServiceClient, + transports.PredictionServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + PredictionServiceAsyncClient, + transports.PredictionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_prediction_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=(), + scopes=None, + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize( "request_type", [ - creative_service.GetCreativeRequest, + prediction_service.PredictRequest, dict, ], ) -def test_get_creative_rest(request_type): - client = CreativeServiceClient( +def test_predict(request_type, transport: str = "grpc"): + client = PredictionServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"name": "networks/sample1/creatives/sample2"} - request = request_type(**request_init) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = creative_service.Creative( - name="name_value", - creative_id=1151, - display_name="display_name_value", - advertiser="advertiser_value", - preview_url="preview_url_value", - size_label="size_label_value", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.predict), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = prediction_service.PredictResponse() + response = client.predict(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = prediction_service.PredictRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, prediction_service.PredictResponse) + + +def test_predict_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.predict), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
) + client.predict() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == prediction_service.PredictRequest() - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = creative_service.Creative.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_creative(request) +def test_predict_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) - # Establish that the response is the type that we expect. - assert isinstance(response, creative_service.Creative) - assert response.name == "name_value" - assert response.creative_id == 1151 - assert response.display_name == "display_name_value" - assert response.advertiser == "advertiser_value" - assert response.preview_url == "preview_url_value" - assert response.size_label == "size_label_value" + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = prediction_service.PredictRequest( + model="model_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.predict), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.predict(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == prediction_service.PredictRequest( + model="model_value", + ) -def test_get_creative_rest_use_cached_wrapped_rpc(): +def test_predict_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CreativeServiceClient( + client = PredictionServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -1031,300 +1250,319 @@ def test_get_creative_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_creative in client._transport._wrapped_methods + assert client._transport.predict in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_creative] = mock_rpc - + client._transport._wrapped_methods[client._transport.predict] = mock_rpc request = {} - client.get_creative(request) + client.predict(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_creative(request) + client.predict(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_creative_rest_required_fields( - request_type=creative_service.GetCreativeRequest, -): - transport_class = transports.CreativeServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) +@pytest.mark.asyncio +async def test_predict_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PredictionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) - # verify fields with default values are dropped + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.predict), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + prediction_service.PredictResponse() + ) + response = await client.predict() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == prediction_service.PredictRequest() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_creative._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - # verify required fields with default values are now present +@pytest.mark.asyncio +async def test_predict_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = PredictionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - jsonified_request["name"] = "name_value" + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_creative._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Ensure method has been cached + assert ( + client._client._transport.predict + in client._client._transport._wrapped_methods + ) - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.predict + ] = mock_rpc + + request = {} + await client.predict(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 - client = CreativeServiceClient( + await client.predict(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_predict_async( + transport: str = "grpc_asyncio", request_type=prediction_service.PredictRequest +): + client = PredictionServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = creative_service.Creative() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response_value = Response() - response_value.status_code = 200 + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.predict), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + prediction_service.PredictResponse() + ) + response = await client.predict(request) - # Convert return value to protobuf type - return_value = creative_service.Creative.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = prediction_service.PredictRequest() + assert args[0] == request - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Establish that the response is the type that we expect. + assert isinstance(response, prediction_service.PredictResponse) - response = client.get_creative(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params +@pytest.mark.asyncio +async def test_predict_async_from_dict(): + await test_predict_async(request_type=dict) -def test_get_creative_rest_unset_required_fields(): - transport = transports.CreativeServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials +def test_predict_field_headers(): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), ) - unset_fields = transport.get_creative._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = prediction_service.PredictRequest() + request.model = "model_value" -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_creative_rest_interceptors(null_interceptor): - transport = transports.CreativeServiceRestTransport( + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.predict), "__call__") as call: + call.return_value = prediction_service.PredictResponse() + client.predict(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_predict_field_headers_async(): + client = PredictionServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.CreativeServiceRestInterceptor(), ) - client = CreativeServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.CreativeServiceRestInterceptor, "post_get_creative" - ) as post, mock.patch.object( - transports.CreativeServiceRestInterceptor, "pre_get_creative" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = creative_service.GetCreativeRequest.pb( - creative_service.GetCreativeRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = creative_service.Creative.to_json( - creative_service.Creative() - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = prediction_service.PredictRequest() - request = creative_service.GetCreativeRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = creative_service.Creative() + request.model = "model_value" - client.get_creative( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.predict), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + prediction_service.PredictResponse() ) + await client.predict(request) - pre.assert_called_once() - post.assert_called_once() + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] -def test_get_creative_rest_bad_request( - transport: str = "rest", request_type=creative_service.GetCreativeRequest -): - client = CreativeServiceClient( +def test_predict_flattened(): + client = PredictionServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"name": "networks/sample1/creatives/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_creative(request) - + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.predict), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = prediction_service.PredictResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.predict( + model="model_value", + instances=[struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE)], + ) -def test_get_creative_rest_flattened(): - client = CreativeServiceClient( + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].instances + mock_val = [struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE)] + assert arg == mock_val + + +def test_predict_flattened_error(): + client = PredictionServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = creative_service.Creative() + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.predict( + prediction_service.PredictRequest(), + model="model_value", + instances=[struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE)], + ) - # get arguments that satisfy an http rule for this method - sample_request = {"name": "networks/sample1/creatives/sample2"} - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) +@pytest.mark.asyncio +async def test_predict_flattened_async(): + client = PredictionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = creative_service.Creative.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.predict), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = prediction_service.PredictResponse() - client.get_creative(**mock_args) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + prediction_service.PredictResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.predict( + model="model_value", + instances=[struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE)], + ) # Establish that the underlying call was made with the expected # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=networks/*/creatives/*}" % client.transport._host, args[1] - ) - - -def test_get_creative_rest_flattened_error(transport: str = "rest"): - client = CreativeServiceClient( + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].instances + mock_val = [struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE)] + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_predict_flattened_error_async(): + client = PredictionServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_creative( - creative_service.GetCreativeRequest(), - name="name_value", + await client.predict( + prediction_service.PredictRequest(), + model="model_value", + instances=[struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE)], ) -def test_get_creative_rest_error(): - client = CreativeServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - @pytest.mark.parametrize( "request_type", [ - creative_service.ListCreativesRequest, + prediction_service.PredictRequest, dict, ], ) -def test_list_creatives_rest(request_type): - client = CreativeServiceClient( +def test_predict_rest(request_type): + client = PredictionServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "networks/sample1"} + request_init = {"model": "models/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = creative_service.ListCreativesResponse( - next_page_token="next_page_token_value", - total_size=1086, - ) + return_value = prediction_service.PredictResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = creative_service.ListCreativesResponse.pb(return_value) + return_value = prediction_service.PredictResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_creatives(request) + response = client.predict(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListCreativesPager) - assert response.next_page_token == "next_page_token_value" - assert response.total_size == 1086 + assert isinstance(response, prediction_service.PredictResponse) -def test_list_creatives_rest_use_cached_wrapped_rpc(): +def test_predict_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CreativeServiceClient( + client = PredictionServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -1334,35 +1572,33 @@ def test_list_creatives_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_creatives in client._transport._wrapped_methods + assert client._transport.predict in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) 
expect a string. ) - client._transport._wrapped_methods[client._transport.list_creatives] = mock_rpc + client._transport._wrapped_methods[client._transport.predict] = mock_rpc request = {} - client.list_creatives(request) + client.predict(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_creatives(request) + client.predict(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_creatives_rest_required_fields( - request_type=creative_service.ListCreativesRequest, -): - transport_class = transports.CreativeServiceRestTransport +def test_predict_rest_required_fields(request_type=prediction_service.PredictRequest): + transport_class = transports.PredictionServiceRestTransport request_init = {} - request_init["parent"] = "" + request_init["model"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -1373,40 +1609,30 @@ def test_list_creatives_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_creatives._get_unset_required_fields(jsonified_request) + ).predict._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["model"] = "model_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_creatives._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - "skip", - ) - ) + ).predict._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "model" in jsonified_request + assert jsonified_request["model"] == "model_value" - client = CreativeServiceClient( + client = PredictionServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = creative_service.ListCreativesResponse() + return_value = prediction_service.PredictResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -1418,70 +1644,68 @@ def test_list_creatives_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = creative_service.ListCreativesResponse.pb(return_value) + return_value = prediction_service.PredictResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_creatives(request) + response = client.predict(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_creatives_rest_unset_required_fields(): - transport = 
transports.CreativeServiceRestTransport( +def test_predict_rest_unset_required_fields(): + transport = transports.PredictionServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_creatives._get_unset_required_fields({}) + unset_fields = transport.predict._get_unset_required_fields({}) assert set(unset_fields) == ( - set( + set(()) + & set( ( - "filter", - "orderBy", - "pageSize", - "pageToken", - "skip", + "model", + "instances", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_creatives_rest_interceptors(null_interceptor): - transport = transports.CreativeServiceRestTransport( +def test_predict_rest_interceptors(null_interceptor): + transport = transports.PredictionServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor - else transports.CreativeServiceRestInterceptor(), + else transports.PredictionServiceRestInterceptor(), ) - client = CreativeServiceClient(transport=transport) + client = PredictionServiceClient(transport=transport) with mock.patch.object( type(client.transport._session), "request" ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CreativeServiceRestInterceptor, "post_list_creatives" + transports.PredictionServiceRestInterceptor, "post_predict" ) as post, mock.patch.object( - transports.CreativeServiceRestInterceptor, "pre_list_creatives" + transports.PredictionServiceRestInterceptor, "pre_predict" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = creative_service.ListCreativesRequest.pb( - creative_service.ListCreativesRequest() + pb_message = prediction_service.PredictRequest.pb( + prediction_service.PredictRequest() ) transcode.return_value = { "method": "post", @@ -1493,19 +1717,19 @@ def test_list_creatives_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 
req.return_value.request = PreparedRequest() - req.return_value._content = creative_service.ListCreativesResponse.to_json( - creative_service.ListCreativesResponse() + req.return_value._content = prediction_service.PredictResponse.to_json( + prediction_service.PredictResponse() ) - request = creative_service.ListCreativesRequest() + request = prediction_service.PredictRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = creative_service.ListCreativesResponse() + post.return_value = prediction_service.PredictResponse() - client.list_creatives( + client.predict( request, metadata=[ ("key", "val"), @@ -1517,16 +1741,16 @@ def test_list_creatives_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_creatives_rest_bad_request( - transport: str = "rest", request_type=creative_service.ListCreativesRequest +def test_predict_rest_bad_request( + transport: str = "rest", request_type=prediction_service.PredictRequest ): - client = CreativeServiceClient( + client = PredictionServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {"parent": "networks/sample1"} + request_init = {"model": "models/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1538,11 +1762,11 @@ def test_list_creatives_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_creatives(request) + client.predict(request) -def test_list_creatives_rest_flattened(): - client = CreativeServiceClient( +def test_predict_rest_flattened(): + client = PredictionServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -1550,14 +1774,15 @@ def test_list_creatives_rest_flattened(): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = creative_service.ListCreativesResponse() + return_value = prediction_service.PredictResponse() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "networks/sample1"} + sample_request = {"model": "models/sample1"} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + model="model_value", + instances=[struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE)], ) mock_args.update(sample_request) @@ -1565,24 +1790,24 @@ def test_list_creatives_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = creative_service.ListCreativesResponse.pb(return_value) + return_value = prediction_service.PredictResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_creatives(**mock_args) + client.predict(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=networks/*}/creatives" % client.transport._host, args[1] + "%s/v1beta/{model=models/*}:predict" % client.transport._host, args[1] ) -def test_list_creatives_rest_flattened_error(transport: str = "rest"): - client = CreativeServiceClient( +def test_predict_rest_flattened_error(transport: str = "rest"): + client = PredictionServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1590,104 +1815,48 @@ def test_list_creatives_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_creatives( - creative_service.ListCreativesRequest(), - parent="parent_value", + client.predict( + prediction_service.PredictRequest(), + model="model_value", + instances=[struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE)], ) -def test_list_creatives_rest_pager(transport: str = "rest"): - client = CreativeServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_predict_rest_error(): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - creative_service.ListCreativesResponse( - creatives=[ - creative_service.Creative(), - creative_service.Creative(), - creative_service.Creative(), - ], - next_page_token="abc", - ), - creative_service.ListCreativesResponse( - creatives=[], - next_page_token="def", - ), - creative_service.ListCreativesResponse( - creatives=[ - creative_service.Creative(), - ], - next_page_token="ghi", - ), - creative_service.ListCreativesResponse( - creatives=[ - creative_service.Creative(), - creative_service.Creative(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - creative_service.ListCreativesResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "networks/sample1"} - - pager = client.list_creatives(request=sample_request) - - 
results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, creative_service.Creative) for i in results) - - pages = list(client.list_creatives(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. - transport = transports.CreativeServiceRestTransport( + transport = transports.PredictionServiceGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): - client = CreativeServiceClient( + client = PredictionServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. - transport = transports.CreativeServiceRestTransport( + transport = transports.PredictionServiceGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): - client = CreativeServiceClient( + client = PredictionServiceClient( client_options={"credentials_file": "credentials.json"}, transport=transport, ) # It is an error to provide an api_key and a transport instance. - transport = transports.CreativeServiceRestTransport( + transport = transports.PredictionServiceGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), ) options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): - client = CreativeServiceClient( + client = PredictionServiceClient( client_options=options, transport=transport, ) @@ -1696,16 +1865,16 @@ def test_credentials_transport_error(): options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): - client = CreativeServiceClient( + client = PredictionServiceClient( client_options=options, credentials=ga_credentials.AnonymousCredentials() ) # It is an error to provide scopes and a transport instance. 
- transport = transports.CreativeServiceRestTransport( + transport = transports.PredictionServiceGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): - client = CreativeServiceClient( + client = PredictionServiceClient( client_options={"scopes": ["1", "2"]}, transport=transport, ) @@ -1713,17 +1882,34 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. - transport = transports.CreativeServiceRestTransport( + transport = transports.PredictionServiceGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), ) - client = CreativeServiceClient(transport=transport) + client = PredictionServiceClient(transport=transport) assert client.transport is transport +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.PredictionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.PredictionServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + @pytest.mark.parametrize( "transport_class", [ - transports.CreativeServiceRestTransport, + transports.PredictionServiceGrpcTransport, + transports.PredictionServiceGrpcAsyncIOTransport, + transports.PredictionServiceRestTransport, ], ) def test_transport_adc(transport_class): @@ -1737,42 +1923,50 @@ def test_transport_adc(transport_class): @pytest.mark.parametrize( "transport_name", [ + "grpc", "rest", ], ) def test_transport_kind(transport_name): - transport = CreativeServiceClient.get_transport_class(transport_name)( + transport = PredictionServiceClient.get_transport_class(transport_name)( credentials=ga_credentials.AnonymousCredentials(), ) assert transport.kind == transport_name -def test_creative_service_base_transport_error(): +def 
test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.PredictionServiceGrpcTransport, + ) + + +def test_prediction_service_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.CreativeServiceTransport( + transport = transports.PredictionServiceTransport( credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) -def test_creative_service_base_transport(): +def test_prediction_service_base_transport(): # Instantiate the base transport. with mock.patch( - "google.ads.admanager_v1.services.creative_service.transports.CreativeServiceTransport.__init__" + "google.ai.generativelanguage_v1beta.services.prediction_service.transports.PredictionServiceTransport.__init__" ) as Transport: Transport.return_value = None - transport = transports.CreativeServiceTransport( + transport = transports.PredictionServiceTransport( credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly # raise NotImplementedError. 
- methods = ( - "get_creative", - "list_creatives", - "get_operation", - ) + methods = ("predict",) for method in methods: with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) @@ -1789,16 +1983,16 @@ def test_creative_service_base_transport(): getattr(transport, r)() -def test_creative_service_base_transport_with_credentials_file(): +def test_prediction_service_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( - "google.ads.admanager_v1.services.creative_service.transports.CreativeServiceTransport._prep_wrapped_messages" + "google.ai.generativelanguage_v1beta.services.prediction_service.transports.PredictionServiceTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.CreativeServiceTransport( + transport = transports.PredictionServiceTransport( credentials_file="credentials.json", quota_project_id="octopus", ) @@ -1810,22 +2004,22 @@ def test_creative_service_base_transport_with_credentials_file(): ) -def test_creative_service_base_transport_with_adc(): +def test_prediction_service_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.ads.admanager_v1.services.creative_service.transports.CreativeServiceTransport._prep_wrapped_messages" + "google.ai.generativelanguage_v1beta.services.prediction_service.transports.PredictionServiceTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.CreativeServiceTransport() + transport = transports.PredictionServiceTransport() adc.assert_called_once() -def test_creative_service_auth_adc(): +def test_prediction_service_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) - CreativeServiceClient() + PredictionServiceClient() adc.assert_called_once_with( scopes=None, default_scopes=(), @@ -1833,12 +2027,135 @@ def test_creative_service_auth_adc(): ) -def test_creative_service_http_transport_client_cert_source_for_mtls(): +@pytest.mark.parametrize( + "transport_class", + [ + transports.PredictionServiceGrpcTransport, + transports.PredictionServiceGrpcAsyncIOTransport, + ], +) +def test_prediction_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=(), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PredictionServiceGrpcTransport, + transports.PredictionServiceGrpcAsyncIOTransport, + transports.PredictionServiceRestTransport, + ], +) +def test_prediction_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.PredictionServiceGrpcTransport, grpc_helpers), + (transports.PredictionServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_prediction_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=(), + scopes=["1", "2"], + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PredictionServiceGrpcTransport, + transports.PredictionServiceGrpcAsyncIOTransport, + ], +) +def test_prediction_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_prediction_service_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" ) as mock_configure_mtls_channel: - transports.CreativeServiceRestTransport( + transports.PredictionServiceRestTransport( credentials=cred, client_cert_source_for_mtls=client_cert_source_callback ) mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) @@ -1847,42 +2164,46 @@ def test_creative_service_http_transport_client_cert_source_for_mtls(): @pytest.mark.parametrize( "transport_name", [ + "grpc", + "grpc_asyncio", "rest", ], ) -def test_creative_service_host_no_port(transport_name): - client = CreativeServiceClient( +def test_prediction_service_host_no_port(transport_name): + client = PredictionServiceClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( - api_endpoint="admanager.googleapis.com" + api_endpoint="generativelanguage.googleapis.com" ), transport=transport_name, ) assert client.transport._host == ( - "admanager.googleapis.com:443" + "generativelanguage.googleapis.com:443" if transport_name in ["grpc", "grpc_asyncio"] - else "https://admanager.googleapis.com" + else "https://generativelanguage.googleapis.com" ) @pytest.mark.parametrize( "transport_name", [ + "grpc", + "grpc_asyncio", "rest", ], ) -def test_creative_service_host_with_port(transport_name): - client = CreativeServiceClient( +def test_prediction_service_host_with_port(transport_name): 
+ client = PredictionServiceClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( - api_endpoint="admanager.googleapis.com:8000" + api_endpoint="generativelanguage.googleapis.com:8000" ), transport=transport_name, ) assert client.transport._host == ( - "admanager.googleapis.com:8000" + "generativelanguage.googleapis.com:8000" if transport_name in ["grpc", "grpc_asyncio"] - else "https://admanager.googleapis.com:8000" + else "https://generativelanguage.googleapis.com:8000" ) @@ -1892,111 +2213,165 @@ def test_creative_service_host_with_port(transport_name): "rest", ], ) -def test_creative_service_client_transport_session_collision(transport_name): +def test_prediction_service_client_transport_session_collision(transport_name): creds1 = ga_credentials.AnonymousCredentials() creds2 = ga_credentials.AnonymousCredentials() - client1 = CreativeServiceClient( + client1 = PredictionServiceClient( credentials=creds1, transport=transport_name, ) - client2 = CreativeServiceClient( + client2 = PredictionServiceClient( credentials=creds2, transport=transport_name, ) - session1 = client1.transport.get_creative._session - session2 = client2.transport.get_creative._session + session1 = client1.transport.predict._session + session2 = client2.transport.predict._session assert session1 != session2 - session1 = client1.transport.list_creatives._session - session2 = client2.transport.list_creatives._session - assert session1 != session2 - -def test_ad_partner_path(): - network_code = "squid" - ad_partner = "clam" - expected = "networks/{network_code}/adPartners/{ad_partner}".format( - network_code=network_code, - ad_partner=ad_partner, - ) - actual = CreativeServiceClient.ad_partner_path(network_code, ad_partner) - assert expected == actual +def test_prediction_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) -def test_parse_ad_partner_path(): - expected = { - 
"network_code": "whelk", - "ad_partner": "octopus", - } - path = CreativeServiceClient.ad_partner_path(**expected) + # Check that channel is used if provided. + transport = transports.PredictionServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None - # Check that the path construction is reversible. - actual = CreativeServiceClient.parse_ad_partner_path(path) - assert expected == actual +def test_prediction_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) -def test_company_path(): - network_code = "oyster" - company = "nudibranch" - expected = "networks/{network_code}/companies/{company}".format( - network_code=network_code, - company=company, + # Check that channel is used if provided. + transport = transports.PredictionServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, ) - actual = CreativeServiceClient.company_path(network_code, company) - assert expected == actual - + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None -def test_parse_company_path(): - expected = { - "network_code": "cuttlefish", - "company": "mussel", - } - path = CreativeServiceClient.company_path(**expected) - - # Check that the path construction is reversible. - actual = CreativeServiceClient.parse_company_path(path) - assert expected == actual +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.PredictionServiceGrpcTransport, + transports.PredictionServiceGrpcAsyncIOTransport, + ], +) +def test_prediction_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() -def test_creative_path(): - network_code = "winkle" - creative = "nautilus" - expected = "networks/{network_code}/creatives/{creative}".format( - network_code=network_code, - creative=creative, - ) - actual = CreativeServiceClient.creative_path(network_code, creative) - assert expected == actual + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred -def test_parse_creative_path(): - expected = { - "network_code": "scallop", - "creative": "abalone", - } - path = CreativeServiceClient.creative_path(**expected) +# Remove this test when 
deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.PredictionServiceGrpcTransport, + transports.PredictionServiceGrpcAsyncIOTransport, + ], +) +def test_prediction_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) - # Check that the path construction is reversible. 
- actual = CreativeServiceClient.parse_creative_path(path) - assert expected == actual + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel -def test_network_path(): - network_code = "squid" - expected = "networks/{network_code}".format( - network_code=network_code, +def test_model_path(): + model = "squid" + expected = "models/{model}".format( + model=model, ) - actual = CreativeServiceClient.network_path(network_code) + actual = PredictionServiceClient.model_path(model) assert expected == actual -def test_parse_network_path(): +def test_parse_model_path(): expected = { - "network_code": "clam", + "model": "clam", } - path = CreativeServiceClient.network_path(**expected) + path = PredictionServiceClient.model_path(**expected) # Check that the path construction is reversible. - actual = CreativeServiceClient.parse_network_path(path) + actual = PredictionServiceClient.parse_model_path(path) assert expected == actual @@ -2005,7 +2380,7 @@ def test_common_billing_account_path(): expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) - actual = CreativeServiceClient.common_billing_account_path(billing_account) + actual = PredictionServiceClient.common_billing_account_path(billing_account) assert expected == actual @@ -2013,10 +2388,10 @@ def test_parse_common_billing_account_path(): expected = { "billing_account": "octopus", } - path = CreativeServiceClient.common_billing_account_path(**expected) + path = PredictionServiceClient.common_billing_account_path(**expected) # Check that the path construction is reversible. 
- actual = CreativeServiceClient.parse_common_billing_account_path(path) + actual = PredictionServiceClient.parse_common_billing_account_path(path) assert expected == actual @@ -2025,7 +2400,7 @@ def test_common_folder_path(): expected = "folders/{folder}".format( folder=folder, ) - actual = CreativeServiceClient.common_folder_path(folder) + actual = PredictionServiceClient.common_folder_path(folder) assert expected == actual @@ -2033,10 +2408,10 @@ def test_parse_common_folder_path(): expected = { "folder": "nudibranch", } - path = CreativeServiceClient.common_folder_path(**expected) + path = PredictionServiceClient.common_folder_path(**expected) # Check that the path construction is reversible. - actual = CreativeServiceClient.parse_common_folder_path(path) + actual = PredictionServiceClient.parse_common_folder_path(path) assert expected == actual @@ -2045,7 +2420,7 @@ def test_common_organization_path(): expected = "organizations/{organization}".format( organization=organization, ) - actual = CreativeServiceClient.common_organization_path(organization) + actual = PredictionServiceClient.common_organization_path(organization) assert expected == actual @@ -2053,10 +2428,10 @@ def test_parse_common_organization_path(): expected = { "organization": "mussel", } - path = CreativeServiceClient.common_organization_path(**expected) + path = PredictionServiceClient.common_organization_path(**expected) # Check that the path construction is reversible. 
- actual = CreativeServiceClient.parse_common_organization_path(path) + actual = PredictionServiceClient.parse_common_organization_path(path) assert expected == actual @@ -2065,7 +2440,7 @@ def test_common_project_path(): expected = "projects/{project}".format( project=project, ) - actual = CreativeServiceClient.common_project_path(project) + actual = PredictionServiceClient.common_project_path(project) assert expected == actual @@ -2073,10 +2448,10 @@ def test_parse_common_project_path(): expected = { "project": "nautilus", } - path = CreativeServiceClient.common_project_path(**expected) + path = PredictionServiceClient.common_project_path(**expected) # Check that the path construction is reversible. - actual = CreativeServiceClient.parse_common_project_path(path) + actual = PredictionServiceClient.parse_common_project_path(path) assert expected == actual @@ -2087,7 +2462,7 @@ def test_common_location_path(): project=project, location=location, ) - actual = CreativeServiceClient.common_location_path(project, location) + actual = PredictionServiceClient.common_location_path(project, location) assert expected == actual @@ -2096,10 +2471,10 @@ def test_parse_common_location_path(): "project": "squid", "location": "clam", } - path = CreativeServiceClient.common_location_path(**expected) + path = PredictionServiceClient.common_location_path(**expected) # Check that the path construction is reversible. 
- actual = CreativeServiceClient.parse_common_location_path(path) + actual = PredictionServiceClient.parse_common_location_path(path) assert expected == actual @@ -2107,18 +2482,18 @@ def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() with mock.patch.object( - transports.CreativeServiceTransport, "_prep_wrapped_messages" + transports.PredictionServiceTransport, "_prep_wrapped_messages" ) as prep: - client = CreativeServiceClient( + client = PredictionServiceClient( credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) with mock.patch.object( - transports.CreativeServiceTransport, "_prep_wrapped_messages" + transports.PredictionServiceTransport, "_prep_wrapped_messages" ) as prep: - transport_class = CreativeServiceClient.get_transport_class() + transport_class = PredictionServiceClient.get_transport_class() transport = transport_class( credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, @@ -2126,71 +2501,28 @@ def test_client_with_default_client_info(): prep.assert_called_once_with(client_info) -def test_get_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.GetOperationRequest -): - client = CreativeServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "networks/sample1/operations/reports/exports/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.GetOperationRequest, - dict, - ], -) -def test_get_operation_rest(request_type): - client = CreativeServiceClient( +@pytest.mark.asyncio +async def test_transport_close_async(): + client = PredictionServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc_asyncio", ) - request_init = {"name": "networks/sample1/operations/reports/exports/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.Operation) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() def test_transport_close(): transports = { "rest": "_session", + "grpc": "_grpc_channel", } for transport, close_name in transports.items(): - client = CreativeServiceClient( + client = PredictionServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport ) with mock.patch.object( @@ -2204,9 +2536,10 @@ def test_transport_close(): def test_client_ctx(): transports = [ "rest", + "grpc", ] for transport in transports: - client = CreativeServiceClient( + client = PredictionServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport ) # Test client calls underlying transport. @@ -2220,7 +2553,11 @@ def test_client_ctx(): @pytest.mark.parametrize( "client_class,transport_class", [ - (CreativeServiceClient, transports.CreativeServiceRestTransport), + (PredictionServiceClient, transports.PredictionServiceGrpcTransport), + ( + PredictionServiceAsyncClient, + transports.PredictionServiceGrpcAsyncIOTransport, + ), ], ) def test_api_key_credentials(client_class, transport_class): diff --git a/packages/google-analytics-data/CHANGELOG.md b/packages/google-analytics-data/CHANGELOG.md index ad685dbf13a6..f863e9dda9cd 100644 --- a/packages/google-analytics-data/CHANGELOG.md +++ b/packages/google-analytics-data/CHANGELOG.md @@ -1,5 +1,18 @@ # Changelog +## [0.18.12](https://github.com/googleapis/google-cloud-python/compare/google-analytics-data-v0.18.11...google-analytics-data-v0.18.12) (2024-09-23) + + +### Features + +* add `GetPropertyQuotasSnapshot` method to the Data API v1alpha ([65f098a](https://github.com/googleapis/google-cloud-python/commit/65f098a1125677c69240849703a0b97bcab7fc4c)) +* add `PropertyQuotasSnapshot` type to the Data API v1alpha 
([65f098a](https://github.com/googleapis/google-cloud-python/commit/65f098a1125677c69240849703a0b97bcab7fc4c)) + + +### Documentation + +* update the documentation for the `CreateReportTask` method ([65f098a](https://github.com/googleapis/google-cloud-python/commit/65f098a1125677c69240849703a0b97bcab7fc4c)) + ## [0.18.11](https://github.com/googleapis/google-cloud-python/compare/google-analytics-data-v0.18.10...google-analytics-data-v0.18.11) (2024-08-08) diff --git a/packages/google-analytics-data/google/analytics/data/gapic_version.py b/packages/google-analytics-data/google/analytics/data/gapic_version.py index 558c8aab67c5..48dad06a30d5 100644 --- a/packages/google-analytics-data/google/analytics/data/gapic_version.py +++ b/packages/google-analytics-data/google/analytics/data/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.18.12" # {x-release-please-version} diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/__init__.py b/packages/google-analytics-data/google/analytics/data_v1alpha/__init__.py index d27c32f3b750..4b5c6ad3ac51 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/__init__.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/__init__.py @@ -32,6 +32,7 @@ CreateRecurringAudienceListRequest, CreateReportTaskRequest, GetAudienceListRequest, + GetPropertyQuotasSnapshotRequest, GetRecurringAudienceListRequest, GetReportTaskRequest, ListAudienceListsRequest, @@ -40,6 +41,7 @@ ListRecurringAudienceListsResponse, ListReportTasksRequest, ListReportTasksResponse, + PropertyQuotasSnapshot, QueryAudienceListRequest, QueryAudienceListResponse, QueryReportTaskRequest, @@ -172,6 +174,7 @@ "FunnelStep", "FunnelSubReport", "GetAudienceListRequest", + "GetPropertyQuotasSnapshotRequest", "GetRecurringAudienceListRequest", "GetReportTaskRequest", 
"InListFilter", @@ -190,6 +193,7 @@ "NumericValue", "OrderBy", "PropertyQuota", + "PropertyQuotasSnapshot", "QueryAudienceListRequest", "QueryAudienceListResponse", "QueryReportTaskRequest", diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_metadata.json b/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_metadata.json index 886097e06d3f..bb6c0b6f462d 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_metadata.json +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_metadata.json @@ -30,6 +30,11 @@ "get_audience_list" ] }, + "GetPropertyQuotasSnapshot": { + "methods": [ + "get_property_quotas_snapshot" + ] + }, "GetRecurringAudienceList": { "methods": [ "get_recurring_audience_list" @@ -100,6 +105,11 @@ "get_audience_list" ] }, + "GetPropertyQuotasSnapshot": { + "methods": [ + "get_property_quotas_snapshot" + ] + }, "GetRecurringAudienceList": { "methods": [ "get_recurring_audience_list" @@ -170,6 +180,11 @@ "get_audience_list" ] }, + "GetPropertyQuotasSnapshot": { + "methods": [ + "get_property_quotas_snapshot" + ] + }, "GetRecurringAudienceList": { "methods": [ "get_recurring_audience_list" diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_version.py b/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_version.py index 558c8aab67c5..48dad06a30d5 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_version.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.18.12" # {x-release-please-version} diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/async_client.py b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/async_client.py index 5afbe71746b5..85f47086326b 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/async_client.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/async_client.py @@ -71,6 +71,12 @@ class AlphaAnalyticsDataAsyncClient: parse_audience_list_path = staticmethod( AlphaAnalyticsDataClient.parse_audience_list_path ) + property_quotas_snapshot_path = staticmethod( + AlphaAnalyticsDataClient.property_quotas_snapshot_path + ) + parse_property_quotas_snapshot_path = staticmethod( + AlphaAnalyticsDataClient.parse_property_quotas_snapshot_path + ) recurring_audience_list_path = staticmethod( AlphaAnalyticsDataClient.recurring_audience_list_path ) @@ -1468,6 +1474,118 @@ async def sample_list_recurring_audience_lists(): # Done; return the response. return response + async def get_property_quotas_snapshot( + self, + request: Optional[ + Union[analytics_data_api.GetPropertyQuotasSnapshotRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.PropertyQuotasSnapshot: + r"""Get all property quotas organized by quota category + for a given property. This will charge 1 property quota + from the category with the most quota. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1alpha + + async def sample_get_property_quotas_snapshot(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1alpha.GetPropertyQuotasSnapshotRequest( + name="name_value", + ) + + # Make the request + response = await client.get_property_quotas_snapshot(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.analytics.data_v1alpha.types.GetPropertyQuotasSnapshotRequest, dict]]): + The request object. A request to return the + PropertyQuotasSnapshot for a given + category. + name (:class:`str`): + Required. Quotas from this property will be listed in + the response. Format: + ``properties/{property}/propertyQuotasSnapshot`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1alpha.types.PropertyQuotasSnapshot: + Current state of all Property Quotas + organized by quota category. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.GetPropertyQuotasSnapshotRequest): + request = analytics_data_api.GetPropertyQuotasSnapshotRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_property_quotas_snapshot + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def create_report_task( self, request: Optional[ @@ -1485,6 +1603,12 @@ async def create_report_task( running asynchronous request to form a customized report of your Google Analytics event data. + A report task will be retained and available for + querying for 72 hours after it has been created. + + A report task created by one user can be listed and + queried by all users who have access to the property. + .. 
code-block:: python # This snippet has been automatically generated and should be regarded as a diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py index 4a3bc827021a..2b333ae4af4a 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py @@ -207,6 +207,21 @@ def parse_audience_list_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def property_quotas_snapshot_path( + property: str, + ) -> str: + """Returns a fully-qualified property_quotas_snapshot string.""" + return "properties/{property}/propertyQuotasSnapshot".format( + property=property, + ) + + @staticmethod + def parse_property_quotas_snapshot_path(path: str) -> Dict[str, str]: + """Parses a property_quotas_snapshot path into its component segments.""" + m = re.match(r"^properties/(?P.+?)/propertyQuotasSnapshot$", path) + return m.groupdict() if m else {} + @staticmethod def recurring_audience_list_path( property: str, @@ -1898,6 +1913,117 @@ def sample_list_recurring_audience_lists(): # Done; return the response. return response + def get_property_quotas_snapshot( + self, + request: Optional[ + Union[analytics_data_api.GetPropertyQuotasSnapshotRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.PropertyQuotasSnapshot: + r"""Get all property quotas organized by quota category + for a given property. This will charge 1 property quota + from the category with the most quota. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1alpha + + def sample_get_property_quotas_snapshot(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1alpha.GetPropertyQuotasSnapshotRequest( + name="name_value", + ) + + # Make the request + response = client.get_property_quotas_snapshot(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.analytics.data_v1alpha.types.GetPropertyQuotasSnapshotRequest, dict]): + The request object. A request to return the + PropertyQuotasSnapshot for a given + category. + name (str): + Required. Quotas from this property will be listed in + the response. Format: + ``properties/{property}/propertyQuotasSnapshot`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.analytics.data_v1alpha.types.PropertyQuotasSnapshot: + Current state of all Property Quotas + organized by quota category. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.GetPropertyQuotasSnapshotRequest): + request = analytics_data_api.GetPropertyQuotasSnapshotRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.get_property_quotas_snapshot + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def create_report_task( self, request: Optional[ @@ -1915,6 +2041,12 @@ def create_report_task( running asynchronous request to form a customized report of your Google Analytics event data. + A report task will be retained and available for + querying for 72 hours after it has been created. + + A report task created by one user can be listed and + queried by all users who have access to the property. + .. 
code-block:: python # This snippet has been automatically generated and should be regarded as a diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/base.py b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/base.py index c2c66c588816..3ba97b9f363e 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/base.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/base.py @@ -180,6 +180,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_property_quotas_snapshot: gapic_v1.method.wrap_method( + self.get_property_quotas_snapshot, + default_timeout=None, + client_info=client_info, + ), self.create_report_task: gapic_v1.method.wrap_method( self.create_report_task, default_timeout=None, @@ -320,6 +325,18 @@ def list_recurring_audience_lists( ]: raise NotImplementedError() + @property + def get_property_quotas_snapshot( + self, + ) -> Callable[ + [analytics_data_api.GetPropertyQuotasSnapshotRequest], + Union[ + analytics_data_api.PropertyQuotasSnapshot, + Awaitable[analytics_data_api.PropertyQuotasSnapshot], + ], + ]: + raise NotImplementedError() + @property def create_report_task( self, diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc.py b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc.py index b9962cad40e2..c43f7d864e80 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc.py @@ -659,6 +659,37 @@ def list_recurring_audience_lists( ) return 
self._stubs["list_recurring_audience_lists"] + @property + def get_property_quotas_snapshot( + self, + ) -> Callable[ + [analytics_data_api.GetPropertyQuotasSnapshotRequest], + analytics_data_api.PropertyQuotasSnapshot, + ]: + r"""Return a callable for the get property quotas snapshot method over gRPC. + + Get all property quotas organized by quota category + for a given property. This will charge 1 property quota + from the category with the most quota. + + Returns: + Callable[[~.GetPropertyQuotasSnapshotRequest], + ~.PropertyQuotasSnapshot]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_property_quotas_snapshot" not in self._stubs: + self._stubs["get_property_quotas_snapshot"] = self.grpc_channel.unary_unary( + "/google.analytics.data.v1alpha.AlphaAnalyticsData/GetPropertyQuotasSnapshot", + request_serializer=analytics_data_api.GetPropertyQuotasSnapshotRequest.serialize, + response_deserializer=analytics_data_api.PropertyQuotasSnapshot.deserialize, + ) + return self._stubs["get_property_quotas_snapshot"] + @property def create_report_task( self, @@ -672,6 +703,12 @@ def create_report_task( running asynchronous request to form a customized report of your Google Analytics event data. + A report task will be retained and available for + querying for 72 hours after it has been created. + + A report task created by one user can be listed and + queried by all users who have access to the property. 
+ Returns: Callable[[~.CreateReportTaskRequest], ~.Operation]: diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc_asyncio.py b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc_asyncio.py index c05a987fbb2b..a220f2ddb524 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc_asyncio.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc_asyncio.py @@ -667,6 +667,37 @@ def list_recurring_audience_lists( ) return self._stubs["list_recurring_audience_lists"] + @property + def get_property_quotas_snapshot( + self, + ) -> Callable[ + [analytics_data_api.GetPropertyQuotasSnapshotRequest], + Awaitable[analytics_data_api.PropertyQuotasSnapshot], + ]: + r"""Return a callable for the get property quotas snapshot method over gRPC. + + Get all property quotas organized by quota category + for a given property. This will charge 1 property quota + from the category with the most quota. + + Returns: + Callable[[~.GetPropertyQuotasSnapshotRequest], + Awaitable[~.PropertyQuotasSnapshot]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_property_quotas_snapshot" not in self._stubs: + self._stubs["get_property_quotas_snapshot"] = self.grpc_channel.unary_unary( + "/google.analytics.data.v1alpha.AlphaAnalyticsData/GetPropertyQuotasSnapshot", + request_serializer=analytics_data_api.GetPropertyQuotasSnapshotRequest.serialize, + response_deserializer=analytics_data_api.PropertyQuotasSnapshot.deserialize, + ) + return self._stubs["get_property_quotas_snapshot"] + @property def create_report_task( self, @@ -681,6 +712,12 @@ def create_report_task( running asynchronous request to form a customized report of your Google Analytics event data. + A report task will be retained and available for + querying for 72 hours after it has been created. + + A report task created by one user can be listed and + queried by all users who have access to the property. + Returns: Callable[[~.CreateReportTaskRequest], Awaitable[~.Operation]]: @@ -841,6 +878,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_property_quotas_snapshot: gapic_v1.method_async.wrap_method( + self.get_property_quotas_snapshot, + default_timeout=None, + client_info=client_info, + ), self.create_report_task: gapic_v1.method_async.wrap_method( self.create_report_task, default_timeout=None, diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/rest.py b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/rest.py index 5f98dacd404b..510c1d55640b 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/rest.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/rest.py @@ -103,6 +103,14 @@ def post_get_audience_list(self, response): logging.log(f"Received response: {response}") return response + def pre_get_property_quotas_snapshot(self, request, metadata): + 
logging.log(f"Received request: {request}") + return request, metadata + + def post_get_property_quotas_snapshot(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_recurring_audience_list(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -275,6 +283,31 @@ def post_get_audience_list( """ return response + def pre_get_property_quotas_snapshot( + self, + request: analytics_data_api.GetPropertyQuotasSnapshotRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + analytics_data_api.GetPropertyQuotasSnapshotRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_property_quotas_snapshot + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlphaAnalyticsData server. + """ + return request, metadata + + def post_get_property_quotas_snapshot( + self, response: analytics_data_api.PropertyQuotasSnapshot + ) -> analytics_data_api.PropertyQuotasSnapshot: + """Post-rpc interceptor for get_property_quotas_snapshot + + Override in a subclass to manipulate the response + after it is returned by the AlphaAnalyticsData server but before + it is returned to user code. 
+ """ + return response + def pre_get_recurring_audience_list( self, request: analytics_data_api.GetRecurringAudienceListRequest, @@ -1002,6 +1035,98 @@ def __call__( resp = self._interceptor.post_get_audience_list(resp) return resp + class _GetPropertyQuotasSnapshot(AlphaAnalyticsDataRestStub): + def __hash__(self): + return hash("GetPropertyQuotasSnapshot") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: analytics_data_api.GetPropertyQuotasSnapshotRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analytics_data_api.PropertyQuotasSnapshot: + r"""Call the get property quotas + snapshot method over HTTP. + + Args: + request (~.analytics_data_api.GetPropertyQuotasSnapshotRequest): + The request object. A request to return the + PropertyQuotasSnapshot for a given + category. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.analytics_data_api.PropertyQuotasSnapshot: + Current state of all Property Quotas + organized by quota category. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=properties/*/propertyQuotasSnapshot}", + }, + ] + request, metadata = self._interceptor.pre_get_property_quotas_snapshot( + request, metadata + ) + pb_request = analytics_data_api.GetPropertyQuotasSnapshotRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = analytics_data_api.PropertyQuotasSnapshot() + pb_resp = analytics_data_api.PropertyQuotasSnapshot.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_property_quotas_snapshot(resp) + return resp + class _GetRecurringAudienceList(AlphaAnalyticsDataRestStub): def __hash__(self): return hash("GetRecurringAudienceList") @@ -1876,6 +2001,17 @@ def get_audience_list( # In C++ this would require a dynamic_cast return self._GetAudienceList(self._session, self._host, self._interceptor) # type: ignore + @property + def get_property_quotas_snapshot( + self, + ) -> Callable[ + [analytics_data_api.GetPropertyQuotasSnapshotRequest], + analytics_data_api.PropertyQuotasSnapshot, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetPropertyQuotasSnapshot(self._session, self._host, self._interceptor) # type: ignore + @property def get_recurring_audience_list( self, diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/types/__init__.py b/packages/google-analytics-data/google/analytics/data_v1alpha/types/__init__.py index be0c011beb28..c5f36ceee95f 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/types/__init__.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/types/__init__.py @@ -23,6 +23,7 @@ CreateRecurringAudienceListRequest, CreateReportTaskRequest, GetAudienceListRequest, + GetPropertyQuotasSnapshotRequest, GetRecurringAudienceListRequest, GetReportTaskRequest, ListAudienceListsRequest, @@ -31,6 +32,7 @@ ListRecurringAudienceListsResponse, ListReportTasksRequest, ListReportTasksResponse, + PropertyQuotasSnapshot, QueryAudienceListRequest, QueryAudienceListResponse, QueryReportTaskRequest, @@ -129,6 +131,7 @@ 
"CreateRecurringAudienceListRequest", "CreateReportTaskRequest", "GetAudienceListRequest", + "GetPropertyQuotasSnapshotRequest", "GetRecurringAudienceListRequest", "GetReportTaskRequest", "ListAudienceListsRequest", @@ -137,6 +140,7 @@ "ListRecurringAudienceListsResponse", "ListReportTasksRequest", "ListReportTasksResponse", + "PropertyQuotasSnapshot", "QueryAudienceListRequest", "QueryAudienceListResponse", "QueryReportTaskRequest", diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/types/analytics_data_api.py b/packages/google-analytics-data/google/analytics/data_v1alpha/types/analytics_data_api.py index 40cf2af6b247..5ef02adbab94 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/types/analytics_data_api.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/types/analytics_data_api.py @@ -31,6 +31,8 @@ "GetRecurringAudienceListRequest", "ListRecurringAudienceListsRequest", "ListRecurringAudienceListsResponse", + "GetPropertyQuotasSnapshotRequest", + "PropertyQuotasSnapshot", "GetAudienceListRequest", "ListAudienceListsRequest", "ListAudienceListsResponse", @@ -368,6 +370,60 @@ def raw_page(self): ) +class GetPropertyQuotasSnapshotRequest(proto.Message): + r"""A request to return the PropertyQuotasSnapshot for a given + category. + + Attributes: + name (str): + Required. Quotas from this property will be listed in the + response. Format: + ``properties/{property}/propertyQuotasSnapshot`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class PropertyQuotasSnapshot(proto.Message): + r"""Current state of all Property Quotas organized by quota + category. + + Attributes: + name (str): + Identifier. The property quota snapshot + resource name. 
+ core_property_quota (google.analytics.data_v1alpha.types.PropertyQuota): + Property Quota for core property tokens + realtime_property_quota (google.analytics.data_v1alpha.types.PropertyQuota): + Property Quota for realtime property tokens + funnel_property_quota (google.analytics.data_v1alpha.types.PropertyQuota): + Property Quota for funnel property tokens + """ + + name: str = proto.Field( + proto.STRING, + number=4, + ) + core_property_quota: data.PropertyQuota = proto.Field( + proto.MESSAGE, + number=1, + message=data.PropertyQuota, + ) + realtime_property_quota: data.PropertyQuota = proto.Field( + proto.MESSAGE, + number=2, + message=data.PropertyQuota, + ) + funnel_property_quota: data.PropertyQuota = proto.Field( + proto.MESSAGE, + number=3, + message=data.PropertyQuota, + ) + + class GetAudienceListRequest(proto.Message): r"""A request to retrieve configuration metadata about a specific audience list. @@ -942,7 +998,7 @@ class RunFunnelReportRequest(proto.Message): Attributes: property (str): - Optional. A Google Analytics GA4 property identifier whose + Optional. A Google Analytics property identifier whose events are tracked. Specified in the URL path and not the body. To learn more, see `where to find your Property ID `__. @@ -1146,7 +1202,7 @@ class ReportTask(proto.Message): name (str): Output only. Identifier. The report task resource name assigned during creation. Format: - ``properties/{property}/reportTasks/{report_task}`` + "properties/{property}/reportTasks/{report_task}". report_definition (google.analytics.data_v1alpha.types.ReportTask.ReportDefinition): Optional. A report definition to fetch report data, which describes the structure of a report. @@ -1236,8 +1292,8 @@ class ReportDefinition(proto.Message): returned if they are not separately removed by a filter. Regardless of this ``keep_empty_rows`` setting, only data - recorded by the Google Analytics (GA4) property can be - displayed in a report. 
+ recorded by the Google Analytics property can be displayed + in a report. For example if a property never logs a ``purchase`` event, then a query for the ``eventName`` dimension and diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/types/data.py b/packages/google-analytics-data/google/analytics/data_v1alpha/types/data.py index d0d65e10b736..f3e86639400b 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/types/data.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/types/data.py @@ -293,7 +293,7 @@ class MetricType(proto.Enum): class RestrictedMetricType(proto.Enum): r"""Categories of data that you may be restricted from viewing on - certain GA4 properties. + certain Google Analytics properties. Values: RESTRICTED_METRIC_TYPE_UNSPECIFIED (0): @@ -2231,7 +2231,7 @@ class Segment(proto.Message): particular line of products or who visit a specific part of your site or trigger certain events in your app. - To learn more, see `GA4 Segment + To learn more, see `Segment Builder `__. This message has `oneof`_ fields (mutually exclusive fields). diff --git a/packages/google-analytics-data/google/analytics/data_v1beta/gapic_version.py b/packages/google-analytics-data/google/analytics/data_v1beta/gapic_version.py index 558c8aab67c5..48dad06a30d5 100644 --- a/packages/google-analytics-data/google/analytics/data_v1beta/gapic_version.py +++ b/packages/google-analytics-data/google/analytics/data_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.18.12" # {x-release-please-version} diff --git a/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_async.py b/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_async.py new file mode 100644 index 000000000000..cfa47528bf6b --- /dev/null +++ b/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetPropertyQuotasSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetPropertyQuotasSnapshot_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1alpha + + +async def sample_get_property_quotas_snapshot(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1alpha.GetPropertyQuotasSnapshotRequest( + name="name_value", + ) + + # Make the request + response = await client.get_property_quotas_snapshot(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetPropertyQuotasSnapshot_async] diff --git a/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_sync.py b/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_sync.py new file mode 100644 index 000000000000..964edacdbb5d --- /dev/null +++ b/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GetPropertyQuotasSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetPropertyQuotasSnapshot_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1alpha + + +def sample_get_property_quotas_snapshot(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1alpha.GetPropertyQuotasSnapshotRequest( + name="name_value", + ) + + # Make the request + response = client.get_property_quotas_snapshot(request=request) + + # Handle the response + print(response) + +# [END analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetPropertyQuotasSnapshot_sync] diff --git a/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json b/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json index 619e88f4243b..d5b5816eb6e5 100644 --- a/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json +++ b/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-analytics-data", - "version": "0.1.0" + "version": "0.18.12" }, "snippets": [ { @@ -679,6 +679,167 @@ ], "title": 
"analyticsdata_v1alpha_generated_alpha_analytics_data_get_audience_list_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataAsyncClient", + "shortName": "AlphaAnalyticsDataAsyncClient" + }, + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataAsyncClient.get_property_quotas_snapshot", + "method": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData.GetPropertyQuotasSnapshot", + "service": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData", + "shortName": "AlphaAnalyticsData" + }, + "shortName": "GetPropertyQuotasSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1alpha.types.GetPropertyQuotasSnapshotRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1alpha.types.PropertyQuotasSnapshot", + "shortName": "get_property_quotas_snapshot" + }, + "description": "Sample for GetPropertyQuotasSnapshot", + "file": "analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetPropertyQuotasSnapshot_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_async.py" + }, + { + 
"canonical": true, + "clientMethod": { + "client": { + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataClient", + "shortName": "AlphaAnalyticsDataClient" + }, + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataClient.get_property_quotas_snapshot", + "method": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData.GetPropertyQuotasSnapshot", + "service": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData", + "shortName": "AlphaAnalyticsData" + }, + "shortName": "GetPropertyQuotasSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1alpha.types.GetPropertyQuotasSnapshotRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.analytics.data_v1alpha.types.PropertyQuotasSnapshot", + "shortName": "get_property_quotas_snapshot" + }, + "description": "Sample for GetPropertyQuotasSnapshot", + "file": "analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetPropertyQuotasSnapshot_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1alpha_generated_alpha_analytics_data_get_property_quotas_snapshot_sync.py" + }, { "canonical": true, "clientMethod": { diff --git 
a/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1beta.json b/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1beta.json index 29e86a085403..753d0fcebd81 100644 --- a/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1beta.json +++ b/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-analytics-data", - "version": "0.1.0" + "version": "0.18.12" }, "snippets": [ { diff --git a/packages/google-analytics-data/scripts/fixup_data_v1alpha_keywords.py b/packages/google-analytics-data/scripts/fixup_data_v1alpha_keywords.py index 416f17a987b4..463b61dcee73 100644 --- a/packages/google-analytics-data/scripts/fixup_data_v1alpha_keywords.py +++ b/packages/google-analytics-data/scripts/fixup_data_v1alpha_keywords.py @@ -43,6 +43,7 @@ class dataCallTransformer(cst.CSTTransformer): 'create_recurring_audience_list': ('parent', 'recurring_audience_list', ), 'create_report_task': ('parent', 'report_task', ), 'get_audience_list': ('name', ), + 'get_property_quotas_snapshot': ('name', ), 'get_recurring_audience_list': ('name', ), 'get_report_task': ('name', ), 'list_audience_lists': ('parent', 'page_size', 'page_token', ), diff --git a/packages/google-analytics-data/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py b/packages/google-analytics-data/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py index 16d76b2f738b..1a4da5db4733 100644 --- a/packages/google-analytics-data/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py +++ b/packages/google-analytics-data/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py @@ -5120,11 +5120,11 @@ async def test_list_recurring_audience_lists_async_pages(): @pytest.mark.parametrize( "request_type", [ - analytics_data_api.CreateReportTaskRequest, + 
analytics_data_api.GetPropertyQuotasSnapshotRequest, dict, ], ) -def test_create_report_task(request_type, transport: str = "grpc"): +def test_get_property_quotas_snapshot(request_type, transport: str = "grpc"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5136,23 +5136,26 @@ def test_create_report_task(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_report_task), "__call__" + type(client.transport.get_property_quotas_snapshot), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_report_task(request) + call.return_value = analytics_data_api.PropertyQuotasSnapshot( + name="name_value", + ) + response = client.get_property_quotas_snapshot(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_data_api.CreateReportTaskRequest() + request = analytics_data_api.GetPropertyQuotasSnapshotRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, analytics_data_api.PropertyQuotasSnapshot) + assert response.name == "name_value" -def test_create_report_task_empty_call(): +def test_get_property_quotas_snapshot_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AlphaAnalyticsDataClient( @@ -5162,18 +5165,18 @@ def test_create_report_task_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.create_report_task), "__call__" + type(client.transport.get_property_quotas_snapshot), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_report_task() + client.get_property_quotas_snapshot() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.CreateReportTaskRequest() + assert args[0] == analytics_data_api.GetPropertyQuotasSnapshotRequest() -def test_create_report_task_non_empty_request_with_auto_populated_field(): +def test_get_property_quotas_snapshot_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlphaAnalyticsDataClient( @@ -5184,26 +5187,26 @@ def test_create_report_task_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_data_api.CreateReportTaskRequest( - parent="parent_value", + request = analytics_data_api.GetPropertyQuotasSnapshotRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_report_task), "__call__" + type(client.transport.get_property_quotas_snapshot), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_report_task(request=request) + client.get_property_quotas_snapshot(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.CreateReportTaskRequest( - parent="parent_value", + assert args[0] == analytics_data_api.GetPropertyQuotasSnapshotRequest( + name="name_value", ) -def test_create_report_task_use_cached_wrapped_rpc(): +def test_get_property_quotas_snapshot_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5218,7 +5221,8 @@ def test_create_report_task_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_report_task in client._transport._wrapped_methods + client._transport.get_property_quotas_snapshot + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -5227,20 +5231,15 @@ def test_create_report_task_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_report_task + client._transport.get_property_quotas_snapshot ] = mock_rpc request = {} - client.create_report_task(request) + client.get_property_quotas_snapshot(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_report_task(request) + client.get_property_quotas_snapshot(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5248,7 +5247,7 @@ def test_create_report_task_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_report_task_empty_call_async(): +async def test_get_property_quotas_snapshot_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AlphaAnalyticsDataAsyncClient( @@ -5258,20 +5257,22 @@ async def test_create_report_task_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_report_task), "__call__" + type(client.transport.get_property_quotas_snapshot), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + analytics_data_api.PropertyQuotasSnapshot( + name="name_value", + ) ) - response = await client.create_report_task() + response = await client.get_property_quotas_snapshot() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.CreateReportTaskRequest() + assert args[0] == analytics_data_api.GetPropertyQuotasSnapshotRequest() @pytest.mark.asyncio -async def test_create_report_task_async_use_cached_wrapped_rpc( +async def test_get_property_quotas_snapshot_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5288,7 +5289,7 @@ async def test_create_report_task_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_report_task + client._client._transport.get_property_quotas_snapshot in client._client._transport._wrapped_methods ) @@ -5296,21 +5297,16 @@ async def test_create_report_task_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_report_task + client._client._transport.get_property_quotas_snapshot ] = mock_rpc request = {} - await client.create_report_task(request) + await client.get_property_quotas_snapshot(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_report_task(request) + await client.get_property_quotas_snapshot(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5318,9 +5314,9 @@ async def test_create_report_task_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_create_report_task_async( +async def test_get_property_quotas_snapshot_async( transport: str = "grpc_asyncio", - request_type=analytics_data_api.CreateReportTaskRequest, + request_type=analytics_data_api.GetPropertyQuotasSnapshotRequest, ): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5333,46 +5329,49 @@ async def test_create_report_task_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_report_task), "__call__" + type(client.transport.get_property_quotas_snapshot), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + analytics_data_api.PropertyQuotasSnapshot( + name="name_value", + ) ) - response = await client.create_report_task(request) + response = await client.get_property_quotas_snapshot(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_data_api.CreateReportTaskRequest() + request = analytics_data_api.GetPropertyQuotasSnapshotRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + assert isinstance(response, analytics_data_api.PropertyQuotasSnapshot) + assert response.name == "name_value" @pytest.mark.asyncio -async def test_create_report_task_async_from_dict(): - await test_create_report_task_async(request_type=dict) +async def test_get_property_quotas_snapshot_async_from_dict(): + await test_get_property_quotas_snapshot_async(request_type=dict) -def test_create_report_task_field_headers(): +def test_get_property_quotas_snapshot_field_headers(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_data_api.CreateReportTaskRequest() + request = analytics_data_api.GetPropertyQuotasSnapshotRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_report_task), "__call__" + type(client.transport.get_property_quotas_snapshot), "__call__" ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_report_task(request) + call.return_value = analytics_data_api.PropertyQuotasSnapshot() + client.get_property_quotas_snapshot(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -5383,30 +5382,30 @@ def test_create_report_task_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_create_report_task_field_headers_async(): +async def test_get_property_quotas_snapshot_field_headers_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = analytics_data_api.CreateReportTaskRequest() + request = analytics_data_api.GetPropertyQuotasSnapshotRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_report_task), "__call__" + type(client.transport.get_property_quotas_snapshot), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") + analytics_data_api.PropertyQuotasSnapshot() ) - await client.create_report_task(request) + await client.get_property_quotas_snapshot(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -5417,41 +5416,37 @@ async def test_create_report_task_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_create_report_task_flattened(): +def test_get_property_quotas_snapshot_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_report_task), "__call__" + type(client.transport.get_property_quotas_snapshot), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = analytics_data_api.PropertyQuotasSnapshot() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_report_task( - parent="parent_value", - report_task=analytics_data_api.ReportTask(name="name_value"), + client.get_property_quotas_snapshot( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].report_task - mock_val = analytics_data_api.ReportTask(name="name_value") + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_create_report_task_flattened_error(): +def test_get_property_quotas_snapshot_flattened_error(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5459,50 +5454,45 @@ def test_create_report_task_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_report_task( - analytics_data_api.CreateReportTaskRequest(), - parent="parent_value", - report_task=analytics_data_api.ReportTask(name="name_value"), + client.get_property_quotas_snapshot( + analytics_data_api.GetPropertyQuotasSnapshotRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_create_report_task_flattened_async(): +async def test_get_property_quotas_snapshot_flattened_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_report_task), "__call__" + type(client.transport.get_property_quotas_snapshot), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = analytics_data_api.PropertyQuotasSnapshot() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + analytics_data_api.PropertyQuotasSnapshot() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.create_report_task( - parent="parent_value", - report_task=analytics_data_api.ReportTask(name="name_value"), + response = await client.get_property_quotas_snapshot( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].report_task - mock_val = analytics_data_api.ReportTask(name="name_value") + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_create_report_task_flattened_error_async(): +async def test_get_property_quotas_snapshot_flattened_error_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5510,21 +5500,20 @@ async def test_create_report_task_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_report_task( - analytics_data_api.CreateReportTaskRequest(), - parent="parent_value", - report_task=analytics_data_api.ReportTask(name="name_value"), + await client.get_property_quotas_snapshot( + analytics_data_api.GetPropertyQuotasSnapshotRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - analytics_data_api.QueryReportTaskRequest, + analytics_data_api.CreateReportTaskRequest, dict, ], ) -def test_query_report_task(request_type, transport: str = "grpc"): +def test_create_report_task(request_type, transport: str = "grpc"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5536,26 +5525,23 @@ def test_query_report_task(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.query_report_task), "__call__" + type(client.transport.create_report_task), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.QueryReportTaskResponse( - row_count=992, - ) - response = client.query_report_task(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_data_api.QueryReportTaskRequest() + request = analytics_data_api.CreateReportTaskRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, analytics_data_api.QueryReportTaskResponse) - assert response.row_count == 992 + assert isinstance(response, future.Future) -def test_query_report_task_empty_call(): +def test_create_report_task_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AlphaAnalyticsDataClient( @@ -5565,18 +5551,18 @@ def test_query_report_task_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.query_report_task), "__call__" + type(client.transport.create_report_task), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.query_report_task() + client.create_report_task() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.QueryReportTaskRequest() + assert args[0] == analytics_data_api.CreateReportTaskRequest() -def test_query_report_task_non_empty_request_with_auto_populated_field(): +def test_create_report_task_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlphaAnalyticsDataClient( @@ -5587,26 +5573,26 @@ def test_query_report_task_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_data_api.QueryReportTaskRequest( - name="name_value", + request = analytics_data_api.CreateReportTaskRequest( + parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.query_report_task), "__call__" + type(client.transport.create_report_task), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.query_report_task(request=request) + client.create_report_task(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.QueryReportTaskRequest( - name="name_value", + assert args[0] == analytics_data_api.CreateReportTaskRequest( + parent="parent_value", ) -def test_query_report_task_use_cached_wrapped_rpc(): +def test_create_report_task_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5620,7 +5606,9 @@ def test_query_report_task_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.query_report_task in client._transport._wrapped_methods + assert ( + client._transport.create_report_task in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -5628,15 +5616,20 @@ def test_query_report_task_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.query_report_task + client._transport.create_report_task ] = mock_rpc request = {} - client.query_report_task(request) + client.create_report_task(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.query_report_task(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_report_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5644,7 +5637,7 @@ def test_query_report_task_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_query_report_task_empty_call_async(): +async def test_create_report_task_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AlphaAnalyticsDataAsyncClient( @@ -5654,22 +5647,20 @@ async def test_query_report_task_empty_call_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.query_report_task), "__call__" + type(client.transport.create_report_task), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.QueryReportTaskResponse( - row_count=992, - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.query_report_task() + response = await client.create_report_task() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.QueryReportTaskRequest() + assert args[0] == analytics_data_api.CreateReportTaskRequest() @pytest.mark.asyncio -async def test_query_report_task_async_use_cached_wrapped_rpc( +async def test_create_report_task_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5686,7 +5677,7 @@ async def test_query_report_task_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.query_report_task + client._client._transport.create_report_task in client._client._transport._wrapped_methods ) @@ -5694,16 +5685,21 @@ async def 
test_query_report_task_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.query_report_task + client._client._transport.create_report_task ] = mock_rpc request = {} - await client.query_report_task(request) + await client.create_report_task(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.query_report_task(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_report_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5711,9 +5707,9 @@ async def test_query_report_task_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_query_report_task_async( +async def test_create_report_task_async( transport: str = "grpc_asyncio", - request_type=analytics_data_api.QueryReportTaskRequest, + request_type=analytics_data_api.CreateReportTaskRequest, ): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5726,49 +5722,46 @@ async def test_query_report_task_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.query_report_task), "__call__" + type(client.transport.create_report_task), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.QueryReportTaskResponse( - row_count=992, - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.query_report_task(request) + response = await client.create_report_task(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_data_api.QueryReportTaskRequest() + request = analytics_data_api.CreateReportTaskRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, analytics_data_api.QueryReportTaskResponse) - assert response.row_count == 992 + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_query_report_task_async_from_dict(): - await test_query_report_task_async(request_type=dict) +async def test_create_report_task_async_from_dict(): + await test_create_report_task_async(request_type=dict) -def test_query_report_task_field_headers(): +def test_create_report_task_field_headers(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_data_api.QueryReportTaskRequest() + request = analytics_data_api.CreateReportTaskRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.query_report_task), "__call__" + type(client.transport.create_report_task), "__call__" ) as call: - call.return_value = analytics_data_api.QueryReportTaskResponse() - client.query_report_task(request) + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_report_task(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -5779,30 +5772,30 @@ def test_query_report_task_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_query_report_task_field_headers_async(): +async def test_create_report_task_field_headers_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_data_api.QueryReportTaskRequest() + request = analytics_data_api.CreateReportTaskRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.query_report_task), "__call__" + type(client.transport.create_report_task), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.QueryReportTaskResponse() + operations_pb2.Operation(name="operations/op") ) - await client.query_report_task(request) + await client.create_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -5813,37 +5806,41 @@ async def test_query_report_task_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_query_report_task_flattened(): +def test_create_report_task_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.query_report_task), "__call__" + type(client.transport.create_report_task), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = analytics_data_api.QueryReportTaskResponse() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.query_report_task( - name="name_value", + client.create_report_task( + parent="parent_value", + report_task=analytics_data_api.ReportTask(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].report_task + mock_val = analytics_data_api.ReportTask(name="name_value") assert arg == mock_val -def test_query_report_task_flattened_error(): +def test_create_report_task_flattened_error(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5851,45 +5848,50 @@ def test_query_report_task_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.query_report_task( - analytics_data_api.QueryReportTaskRequest(), - name="name_value", + client.create_report_task( + analytics_data_api.CreateReportTaskRequest(), + parent="parent_value", + report_task=analytics_data_api.ReportTask(name="name_value"), ) @pytest.mark.asyncio -async def test_query_report_task_flattened_async(): +async def test_create_report_task_flattened_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.query_report_task), "__call__" + type(client.transport.create_report_task), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = analytics_data_api.QueryReportTaskResponse() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.QueryReportTaskResponse() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.query_report_task( - name="name_value", + response = await client.create_report_task( + parent="parent_value", + report_task=analytics_data_api.ReportTask(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].report_task + mock_val = analytics_data_api.ReportTask(name="name_value") assert arg == mock_val @pytest.mark.asyncio -async def test_query_report_task_flattened_error_async(): +async def test_create_report_task_flattened_error_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5897,20 +5899,21 @@ async def test_query_report_task_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.query_report_task( - analytics_data_api.QueryReportTaskRequest(), - name="name_value", + await client.create_report_task( + analytics_data_api.CreateReportTaskRequest(), + parent="parent_value", + report_task=analytics_data_api.ReportTask(name="name_value"), ) @pytest.mark.parametrize( "request_type", [ - analytics_data_api.GetReportTaskRequest, + analytics_data_api.QueryReportTaskRequest, dict, ], ) -def test_get_report_task(request_type, transport: str = "grpc"): +def test_query_report_task(request_type, transport: str = "grpc"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5921,25 +5924,27 @@ def test_get_report_task(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: + with mock.patch.object( + type(client.transport.query_report_task), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.ReportTask( - name="name_value", + call.return_value = analytics_data_api.QueryReportTaskResponse( + row_count=992, ) - response = client.get_report_task(request) + response = client.query_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_data_api.GetReportTaskRequest() + request = analytics_data_api.QueryReportTaskRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_data_api.ReportTask) - assert response.name == "name_value" + assert isinstance(response, analytics_data_api.QueryReportTaskResponse) + assert response.row_count == 992 -def test_get_report_task_empty_call(): +def test_query_report_task_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AlphaAnalyticsDataClient( @@ -5948,17 +5953,19 @@ def test_get_report_task_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: + with mock.patch.object( + type(client.transport.query_report_task), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_report_task() + client.query_report_task() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.GetReportTaskRequest() + assert args[0] == analytics_data_api.QueryReportTaskRequest() -def test_get_report_task_non_empty_request_with_auto_populated_field(): +def test_query_report_task_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlphaAnalyticsDataClient( @@ -5969,24 +5976,26 @@ def test_get_report_task_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_data_api.GetReportTaskRequest( + request = analytics_data_api.QueryReportTaskRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: + with mock.patch.object( + type(client.transport.query_report_task), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_report_task(request=request) + client.query_report_task(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.GetReportTaskRequest( + assert args[0] == analytics_data_api.QueryReportTaskRequest( name="name_value", ) -def test_get_report_task_use_cached_wrapped_rpc(): +def test_query_report_task_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -6000,21 +6009,23 @@ def test_get_report_task_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_report_task in client._transport._wrapped_methods + assert client._transport.query_report_task in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_report_task] = mock_rpc + client._transport._wrapped_methods[ + client._transport.query_report_task + ] = mock_rpc request = {} - client.get_report_task(request) + client.query_report_task(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_report_task(request) + client.query_report_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -6022,7 +6033,7 @@ def test_get_report_task_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_report_task_empty_call_async(): +async def test_query_report_task_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AlphaAnalyticsDataAsyncClient( @@ -6031,21 +6042,23 @@ async def test_get_report_task_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: + with mock.patch.object( + type(client.transport.query_report_task), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.ReportTask( - name="name_value", + analytics_data_api.QueryReportTaskResponse( + row_count=992, ) ) - response = await client.get_report_task() + response = await client.query_report_task() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.GetReportTaskRequest() + assert args[0] == analytics_data_api.QueryReportTaskRequest() @pytest.mark.asyncio -async def test_get_report_task_async_use_cached_wrapped_rpc( +async def test_query_report_task_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6062,7 +6075,7 @@ async def test_get_report_task_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_report_task + client._client._transport.query_report_task in client._client._transport._wrapped_methods ) @@ -6070,16 +6083,16 @@ async def 
test_get_report_task_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_report_task + client._client._transport.query_report_task ] = mock_rpc request = {} - await client.get_report_task(request) + await client.query_report_task(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.get_report_task(request) + await client.query_report_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -6087,9 +6100,9 @@ async def test_get_report_task_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_report_task_async( +async def test_query_report_task_async( transport: str = "grpc_asyncio", - request_type=analytics_data_api.GetReportTaskRequest, + request_type=analytics_data_api.QueryReportTaskRequest, ): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6101,46 +6114,50 @@ async def test_get_report_task_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: + with mock.patch.object( + type(client.transport.query_report_task), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.ReportTask( - name="name_value", + analytics_data_api.QueryReportTaskResponse( + row_count=992, ) ) - response = await client.get_report_task(request) + response = await client.query_report_task(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_data_api.GetReportTaskRequest() + request = analytics_data_api.QueryReportTaskRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, analytics_data_api.ReportTask) - assert response.name == "name_value" + assert isinstance(response, analytics_data_api.QueryReportTaskResponse) + assert response.row_count == 992 @pytest.mark.asyncio -async def test_get_report_task_async_from_dict(): - await test_get_report_task_async(request_type=dict) +async def test_query_report_task_async_from_dict(): + await test_query_report_task_async(request_type=dict) -def test_get_report_task_field_headers(): +def test_query_report_task_field_headers(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_data_api.GetReportTaskRequest() + request = analytics_data_api.QueryReportTaskRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: - call.return_value = analytics_data_api.ReportTask() - client.get_report_task(request) + with mock.patch.object( + type(client.transport.query_report_task), "__call__" + ) as call: + call.return_value = analytics_data_api.QueryReportTaskResponse() + client.query_report_task(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -6156,23 +6173,25 @@ def test_get_report_task_field_headers(): @pytest.mark.asyncio -async def test_get_report_task_field_headers_async(): +async def test_query_report_task_field_headers_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_data_api.GetReportTaskRequest() + request = analytics_data_api.QueryReportTaskRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: + with mock.patch.object( + type(client.transport.query_report_task), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.ReportTask() + analytics_data_api.QueryReportTaskResponse() ) - await client.get_report_task(request) + await client.query_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -6187,18 +6206,20 @@ async def test_get_report_task_field_headers_async(): ) in kw["metadata"] -def test_get_report_task_flattened(): +def test_query_report_task_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: + with mock.patch.object( + type(client.transport.query_report_task), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.ReportTask() + call.return_value = analytics_data_api.QueryReportTaskResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.get_report_task( + client.query_report_task( name="name_value", ) @@ -6211,7 +6232,7 @@ def test_get_report_task_flattened(): assert arg == mock_val -def test_get_report_task_flattened_error(): +def test_query_report_task_flattened_error(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6219,29 +6240,31 @@ def test_get_report_task_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_report_task( - analytics_data_api.GetReportTaskRequest(), + client.query_report_task( + analytics_data_api.QueryReportTaskRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_report_task_flattened_async(): +async def test_query_report_task_flattened_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: + with mock.patch.object( + type(client.transport.query_report_task), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.ReportTask() + call.return_value = analytics_data_api.QueryReportTaskResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.ReportTask() + analytics_data_api.QueryReportTaskResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.get_report_task( + response = await client.query_report_task( name="name_value", ) @@ -6255,7 +6278,7 @@ async def test_get_report_task_flattened_async(): @pytest.mark.asyncio -async def test_get_report_task_flattened_error_async(): +async def test_query_report_task_flattened_error_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6263,8 +6286,8 @@ async def test_get_report_task_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_report_task( - analytics_data_api.GetReportTaskRequest(), + await client.query_report_task( + analytics_data_api.QueryReportTaskRequest(), name="name_value", ) @@ -6272,11 +6295,11 @@ async def test_get_report_task_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - analytics_data_api.ListReportTasksRequest, + analytics_data_api.GetReportTaskRequest, dict, ], ) -def test_list_report_tasks(request_type, transport: str = "grpc"): +def test_get_report_task(request_type, transport: str = "grpc"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6287,27 +6310,25 @@ def test_list_report_tasks(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_report_tasks), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = analytics_data_api.ListReportTasksResponse( - next_page_token="next_page_token_value", + call.return_value = analytics_data_api.ReportTask( + name="name_value", ) - response = client.list_report_tasks(request) + response = client.get_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_data_api.ListReportTasksRequest() + request = analytics_data_api.GetReportTaskRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListReportTasksPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, analytics_data_api.ReportTask) + assert response.name == "name_value" -def test_list_report_tasks_empty_call(): +def test_get_report_task_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AlphaAnalyticsDataClient( @@ -6316,19 +6337,17 @@ def test_list_report_tasks_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_report_tasks), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_report_tasks() + client.get_report_task() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.ListReportTasksRequest() + assert args[0] == analytics_data_api.GetReportTaskRequest() -def test_list_report_tasks_non_empty_request_with_auto_populated_field(): +def test_get_report_task_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlphaAnalyticsDataClient( @@ -6339,28 +6358,24 @@ def test_list_report_tasks_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_data_api.ListReportTasksRequest( - parent="parent_value", - page_token="page_token_value", + request = analytics_data_api.GetReportTaskRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_report_tasks), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_report_tasks(request=request) + client.get_report_task(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.ListReportTasksRequest( - parent="parent_value", - page_token="page_token_value", + assert args[0] == analytics_data_api.GetReportTaskRequest( + name="name_value", ) -def test_list_report_tasks_use_cached_wrapped_rpc(): +def test_get_report_task_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -6374,23 +6389,21 @@ def test_list_report_tasks_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_report_tasks in client._transport._wrapped_methods + assert client._transport.get_report_task in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_report_tasks - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_report_task] = mock_rpc request = {} - client.list_report_tasks(request) + client.get_report_task(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_report_tasks(request) + client.get_report_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -6398,7 +6411,7 @@ def test_list_report_tasks_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_report_tasks_empty_call_async(): +async def test_get_report_task_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AlphaAnalyticsDataAsyncClient( @@ -6407,23 +6420,21 @@ async def test_list_report_tasks_empty_call_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_report_tasks), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.ListReportTasksResponse( - next_page_token="next_page_token_value", + analytics_data_api.ReportTask( + name="name_value", ) ) - response = await client.list_report_tasks() + response = await client.get_report_task() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.ListReportTasksRequest() + assert args[0] == analytics_data_api.GetReportTaskRequest() @pytest.mark.asyncio -async def test_list_report_tasks_async_use_cached_wrapped_rpc( +async def test_get_report_task_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6440,7 +6451,7 @@ async def test_list_report_tasks_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_report_tasks + client._client._transport.get_report_task in client._client._transport._wrapped_methods ) @@ -6448,16 +6459,16 @@ async def test_list_report_tasks_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_report_tasks + client._client._transport.get_report_task ] = mock_rpc request = {} - await client.list_report_tasks(request) + await client.get_report_task(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.list_report_tasks(request) + await client.get_report_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -6465,9 +6476,9 @@ async def test_list_report_tasks_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_list_report_tasks_async( +async def test_get_report_task_async( transport: str = "grpc_asyncio", - request_type=analytics_data_api.ListReportTasksRequest, + request_type=analytics_data_api.GetReportTaskRequest, ): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6479,50 +6490,46 @@ async def test_list_report_tasks_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_report_tasks), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.ListReportTasksResponse( - next_page_token="next_page_token_value", + analytics_data_api.ReportTask( + name="name_value", ) ) - response = await client.list_report_tasks(request) + response = await client.get_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_data_api.ListReportTasksRequest() + request = analytics_data_api.GetReportTaskRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListReportTasksAsyncPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, analytics_data_api.ReportTask) + assert response.name == "name_value" @pytest.mark.asyncio -async def test_list_report_tasks_async_from_dict(): - await test_list_report_tasks_async(request_type=dict) +async def test_get_report_task_async_from_dict(): + await test_get_report_task_async(request_type=dict) -def test_list_report_tasks_field_headers(): +def test_get_report_task_field_headers(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_data_api.ListReportTasksRequest() + request = analytics_data_api.GetReportTaskRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_report_tasks), "__call__" - ) as call: - call.return_value = analytics_data_api.ListReportTasksResponse() - client.list_report_tasks(request) + with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: + call.return_value = analytics_data_api.ReportTask() + client.get_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -6533,30 +6540,28 @@ def test_list_report_tasks_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_report_tasks_field_headers_async(): +async def test_get_report_task_field_headers_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = analytics_data_api.ListReportTasksRequest() + request = analytics_data_api.GetReportTaskRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_report_tasks), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.ListReportTasksResponse() + analytics_data_api.ReportTask() ) - await client.list_report_tasks(request) + await client.get_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -6567,37 +6572,35 @@ async def test_list_report_tasks_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_list_report_tasks_flattened(): +def test_get_report_task_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_report_tasks), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.ListReportTasksResponse() + call.return_value = analytics_data_api.ReportTask() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_report_tasks( - parent="parent_value", + client.get_report_task( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_list_report_tasks_flattened_error(): +def test_get_report_task_flattened_error(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6605,45 +6608,43 @@ def test_list_report_tasks_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_report_tasks( - analytics_data_api.ListReportTasksRequest(), - parent="parent_value", + client.get_report_task( + analytics_data_api.GetReportTaskRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_list_report_tasks_flattened_async(): +async def test_get_report_task_flattened_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_report_tasks), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.ListReportTasksResponse() + call.return_value = analytics_data_api.ReportTask() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.ListReportTasksResponse() + analytics_data_api.ReportTask() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_report_tasks( - parent="parent_value", + response = await client.get_report_task( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_list_report_tasks_flattened_error_async(): +async def test_get_report_task_flattened_error_async(): client = AlphaAnalyticsDataAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6651,111 +6652,499 @@ async def test_list_report_tasks_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_report_tasks( - analytics_data_api.ListReportTasksRequest(), - parent="parent_value", + await client.get_report_task( + analytics_data_api.GetReportTaskRequest(), + name="name_value", ) -def test_list_report_tasks_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + analytics_data_api.ListReportTasksRequest, + dict, + ], +) +def test_list_report_tasks(request_type, transport: str = "grpc"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_report_tasks), "__call__" ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - analytics_data_api.ListReportTasksResponse( - report_tasks=[ - analytics_data_api.ReportTask(), - analytics_data_api.ReportTask(), - analytics_data_api.ReportTask(), - ], - next_page_token="abc", - ), - analytics_data_api.ListReportTasksResponse( - report_tasks=[], - next_page_token="def", - ), - analytics_data_api.ListReportTasksResponse( - report_tasks=[ - analytics_data_api.ReportTask(), - ], - next_page_token="ghi", - ), - analytics_data_api.ListReportTasksResponse( - report_tasks=[ - analytics_data_api.ReportTask(), - analytics_data_api.ReportTask(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.ListReportTasksResponse( + next_page_token="next_page_token_value", ) - pager = client.list_report_tasks(request={}, retry=retry, timeout=timeout) + response = client.list_report_tasks(request) - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_data_api.ListReportTasksRequest() + assert args[0] == request - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, analytics_data_api.ReportTask) for i in results) + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListReportTasksPager) + assert response.next_page_token == "next_page_token_value" -def test_list_report_tasks_pages(transport_name: str = "grpc"): +def test_list_report_tasks_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_report_tasks), "__call__" ) as call: - # Set the response to a series of pages. - call.side_effect = ( - analytics_data_api.ListReportTasksResponse( - report_tasks=[ - analytics_data_api.ReportTask(), - analytics_data_api.ReportTask(), - analytics_data_api.ReportTask(), - ], - next_page_token="abc", - ), - analytics_data_api.ListReportTasksResponse( - report_tasks=[], - next_page_token="def", - ), - analytics_data_api.ListReportTasksResponse( - report_tasks=[ - analytics_data_api.ReportTask(), - ], - next_page_token="ghi", - ), - analytics_data_api.ListReportTasksResponse( - report_tasks=[ - analytics_data_api.ReportTask(), - analytics_data_api.ReportTask(), - ], - ), - RuntimeError, + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - pages = list(client.list_report_tasks(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - + client.list_report_tasks() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.ListReportTasksRequest() + + +def test_list_report_tasks_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = analytics_data_api.ListReportTasksRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_report_tasks(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.ListReportTasksRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_report_tasks_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_report_tasks in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_report_tasks + ] = mock_rpc + request = {} + client.list_report_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_report_tasks(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_report_tasks_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = AlphaAnalyticsDataAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_data_api.ListReportTasksResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_report_tasks() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.ListReportTasksRequest() + + +@pytest.mark.asyncio +async def test_list_report_tasks_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_report_tasks + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_report_tasks + ] = mock_rpc + + request = {} + await client.list_report_tasks(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.list_report_tasks(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_report_tasks_async( + transport: str = "grpc_asyncio", + request_type=analytics_data_api.ListReportTasksRequest, +): + client = AlphaAnalyticsDataAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_data_api.ListReportTasksResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_report_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = analytics_data_api.ListReportTasksRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListReportTasksAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_report_tasks_async_from_dict(): + await test_list_report_tasks_async(request_type=dict) + + +def test_list_report_tasks_field_headers(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = analytics_data_api.ListReportTasksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), "__call__" + ) as call: + call.return_value = analytics_data_api.ListReportTasksResponse() + client.list_report_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_report_tasks_field_headers_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.ListReportTasksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_data_api.ListReportTasksResponse() + ) + await client.list_report_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_report_tasks_flattened(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_report_tasks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.ListReportTasksResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_report_tasks( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_report_tasks_flattened_error(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_report_tasks( + analytics_data_api.ListReportTasksRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_report_tasks_flattened_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.ListReportTasksResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_data_api.ListReportTasksResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_report_tasks( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_report_tasks_flattened_error_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_report_tasks( + analytics_data_api.ListReportTasksRequest(), + parent="parent_value", + ) + + +def test_list_report_tasks_pager(transport_name: str = "grpc"): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + ], + next_page_token="abc", + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[], + next_page_token="def", + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + ], + next_page_token="ghi", + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_report_tasks(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert 
len(results) == 6 + assert all(isinstance(i, analytics_data_api.ReportTask) for i in results) + + +def test_list_report_tasks_pages(transport_name: str = "grpc"): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + ], + next_page_token="abc", + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[], + next_page_token="def", + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + ], + next_page_token="ghi", + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + ], + ), + RuntimeError, + ) + pages = list(client.list_report_tasks(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + @pytest.mark.asyncio async def test_list_report_tasks_async_pager(): client = AlphaAnalyticsDataAsyncClient( @@ -6859,47 +7248,293 @@ async def test_list_report_tasks_async_pages(): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize( - "request_type", - [ - analytics_data_api.RunFunnelReportRequest, - dict, - ], -) -def test_run_funnel_report_rest(request_type): - client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) +@pytest.mark.parametrize( + "request_type", + [ + analytics_data_api.RunFunnelReportRequest, + dict, + ], +) +def test_run_funnel_report_rest(request_type): + client = AlphaAnalyticsDataClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"property": "properties/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = analytics_data_api.RunFunnelReportResponse( + kind="kind_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analytics_data_api.RunFunnelReportResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.run_funnel_report(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.RunFunnelReportResponse) + assert response.kind == "kind_value" + + +def test_run_funnel_report_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.run_funnel_report in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.run_funnel_report + ] = mock_rpc + + request = {} + client.run_funnel_report(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.run_funnel_report(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_run_funnel_report_rest_interceptors(null_interceptor): + transport = transports.AlphaAnalyticsDataRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlphaAnalyticsDataRestInterceptor(), + ) + client = AlphaAnalyticsDataClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AlphaAnalyticsDataRestInterceptor, "post_run_funnel_report" + ) as post, mock.patch.object( + transports.AlphaAnalyticsDataRestInterceptor, "pre_run_funnel_report" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = analytics_data_api.RunFunnelReportRequest.pb( + analytics_data_api.RunFunnelReportRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = analytics_data_api.RunFunnelReportResponse.to_json( + analytics_data_api.RunFunnelReportResponse() + ) + + request = analytics_data_api.RunFunnelReportRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = analytics_data_api.RunFunnelReportResponse() + + client.run_funnel_report( + request, + metadata=[ + 
("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_run_funnel_report_rest_bad_request( + transport: str = "rest", request_type=analytics_data_api.RunFunnelReportRequest +): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"property": "properties/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.run_funnel_report(request) + + +def test_run_funnel_report_rest_error(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_data_api.CreateAudienceListRequest, + dict, + ], +) +def test_create_audience_list_rest(request_type): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "properties/sample1"} + request_init["audience_list"] = { + "name": "name_value", + "audience": "audience_value", + "audience_display_name": "audience_display_name_value", + "dimensions": [{"dimension_name": "dimension_name_value"}], + "state": 1, + "begin_creating_time": {"seconds": 751, "nanos": 543}, + "creation_quota_tokens_charged": 3070, + "row_count": 992, + "error_message": "error_message_value", + "percentage_completed": 0.2106, + "recurring_audience_list": "recurring_audience_list_value", + "webhook_notification": { + "uri": "uri_value", + "channel_token": 
"channel_token_value", + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_data_api.CreateAudienceListRequest.meta.fields[ + "audience_list" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] - # send a request that will satisfy transcoding - request_init = {"property": "properties/sample1"} + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["audience_list"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, 
subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["audience_list"][field])): + del request_init["audience_list"][field][i][subfield] + else: + del request_init["audience_list"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_data_api.RunFunnelReportResponse( - kind="kind_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = analytics_data_api.RunFunnelReportResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.run_funnel_report(request) + response = client.create_audience_list(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_data_api.RunFunnelReportResponse) - assert response.kind == "kind_value" + assert response.operation.name == "operations/spam" -def test_run_funnel_report_rest_use_cached_wrapped_rpc(): +def test_create_audience_list_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -6913,7 +7548,9 @@ def test_run_funnel_report_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.run_funnel_report in client._transport._wrapped_methods + assert ( + client._transport.create_audience_list in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -6921,24 +7558,117 @@ def test_run_funnel_report_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.run_funnel_report + client._transport.create_audience_list ] = mock_rpc request = {} - client.run_funnel_report(request) + client.create_audience_list(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.run_funnel_report(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_audience_list(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 +def test_create_audience_list_rest_required_fields( + request_type=analytics_data_api.CreateAudienceListRequest, +): + transport_class = transports.AlphaAnalyticsDataRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_audience_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_audience_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_audience_list(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_audience_list_rest_unset_required_fields(): + transport = transports.AlphaAnalyticsDataRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_audience_list._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "audienceList", + ) + ) + ) + + @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_run_funnel_report_rest_interceptors(null_interceptor): +def test_create_audience_list_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -6951,14 +7681,16 @@ def test_run_funnel_report_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, 
"post_run_funnel_report" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.AlphaAnalyticsDataRestInterceptor, "post_create_audience_list" ) as post, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "pre_run_funnel_report" + transports.AlphaAnalyticsDataRestInterceptor, "pre_create_audience_list" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_data_api.RunFunnelReportRequest.pb( - analytics_data_api.RunFunnelReportRequest() + pb_message = analytics_data_api.CreateAudienceListRequest.pb( + analytics_data_api.CreateAudienceListRequest() ) transcode.return_value = { "method": "post", @@ -6970,55 +7702,111 @@ def test_run_funnel_report_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = analytics_data_api.RunFunnelReportResponse.to_json( - analytics_data_api.RunFunnelReportResponse() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = analytics_data_api.RunFunnelReportRequest() + request = analytics_data_api.CreateAudienceListRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_data_api.RunFunnelReportResponse() + post.return_value = operations_pb2.Operation() + + client.create_audience_list( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_audience_list_rest_bad_request( + transport: str = "rest", request_type=analytics_data_api.CreateAudienceListRequest +): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "properties/sample1"} + request = request_type(**request_init) + + # Mock 
the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_audience_list(request) + + +def test_create_audience_list_rest_flattened(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "properties/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + audience_list=analytics_data_api.AudienceList(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - client.run_funnel_report( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) + client.create_audience_list(**mock_args) - pre.assert_called_once() - post.assert_called_once() + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=properties/*}/audienceLists" % client.transport._host, + args[1], + ) -def test_run_funnel_report_rest_bad_request( - transport: str = "rest", request_type=analytics_data_api.RunFunnelReportRequest -): +def test_create_audience_list_rest_flattened_error(transport: str = "rest"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"property": "properties/sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.run_funnel_report(request) + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_audience_list( + analytics_data_api.CreateAudienceListRequest(), + parent="parent_value", + audience_list=analytics_data_api.AudienceList(name="name_value"), + ) -def test_run_funnel_report_rest_error(): +def test_create_audience_list_rest_error(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -7027,125 +7815,44 @@ def test_run_funnel_report_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_data_api.CreateAudienceListRequest, + analytics_data_api.QueryAudienceListRequest, dict, ], ) -def test_create_audience_list_rest(request_type): +def test_query_audience_list_rest(request_type): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} - request_init["audience_list"] = { - "name": "name_value", - "audience": "audience_value", - "audience_display_name": "audience_display_name_value", - "dimensions": [{"dimension_name": "dimension_name_value"}], - "state": 1, - "begin_creating_time": {"seconds": 751, "nanos": 543}, - "creation_quota_tokens_charged": 3070, - "row_count": 992, - "error_message": "error_message_value", - "percentage_completed": 0.2106, - "recurring_audience_list": "recurring_audience_list_value", - "webhook_notification": { - "uri": "uri_value", - "channel_token": "channel_token_value", - }, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = analytics_data_api.CreateAudienceListRequest.meta.fields[ - "audience_list" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["audience_list"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime 
version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["audience_list"][field])): - del request_init["audience_list"][field][i][subfield] - else: - del request_init["audience_list"][field][subfield] + request_init = {"name": "properties/sample1/audienceLists/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = analytics_data_api.QueryAudienceListResponse( + row_count=992, + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analytics_data_api.QueryAudienceListResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_audience_list(request) + response = client.query_audience_list(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, analytics_data_api.QueryAudienceListResponse) + assert response.row_count == 992 -def test_create_audience_list_rest_use_cached_wrapped_rpc(): +def test_query_audience_list_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -7160,7 +7867,7 @@ def test_create_audience_list_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_audience_list in client._transport._wrapped_methods + client._transport.query_audience_list in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -7169,33 +7876,29 @@ def test_create_audience_list_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_audience_list + client._transport.query_audience_list ] = mock_rpc request = {} - client.create_audience_list(request) + client.query_audience_list(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_audience_list(request) + client.query_audience_list(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_audience_list_rest_required_fields( - request_type=analytics_data_api.CreateAudienceListRequest, +def test_query_audience_list_rest_required_fields( + request_type=analytics_data_api.QueryAudienceListRequest, ): transport_class = transports.AlphaAnalyticsDataRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -7206,21 +7909,21 @@ def test_create_audience_list_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_audience_list._get_unset_required_fields(jsonified_request) + ).query_audience_list._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_audience_list._get_unset_required_fields(jsonified_request) + ).query_audience_list._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7229,7 +7932,7 @@ def 
test_create_audience_list_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = analytics_data_api.QueryAudienceListResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -7249,37 +7952,32 @@ def test_create_audience_list_rest_required_fields( response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analytics_data_api.QueryAudienceListResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_audience_list(request) + response = client.query_audience_list(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_audience_list_rest_unset_required_fields(): +def test_query_audience_list_rest_unset_required_fields(): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_audience_list._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "parent", - "audienceList", - ) - ) - ) + unset_fields = transport.query_audience_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_audience_list_rest_interceptors(null_interceptor): +def test_query_audience_list_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -7292,16 +7990,14 @@ def 
test_create_audience_list_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "post_create_audience_list" + transports.AlphaAnalyticsDataRestInterceptor, "post_query_audience_list" ) as post, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "pre_create_audience_list" + transports.AlphaAnalyticsDataRestInterceptor, "pre_query_audience_list" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_data_api.CreateAudienceListRequest.pb( - analytics_data_api.CreateAudienceListRequest() + pb_message = analytics_data_api.QueryAudienceListRequest.pb( + analytics_data_api.QueryAudienceListRequest() ) transcode.return_value = { "method": "post", @@ -7313,19 +8009,21 @@ def test_create_audience_list_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = ( + analytics_data_api.QueryAudienceListResponse.to_json( + analytics_data_api.QueryAudienceListResponse() + ) ) - request = analytics_data_api.CreateAudienceListRequest() + request = analytics_data_api.QueryAudienceListRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = analytics_data_api.QueryAudienceListResponse() - client.create_audience_list( + client.query_audience_list( request, metadata=[ ("key", "val"), @@ -7337,8 +8035,8 @@ def test_create_audience_list_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_audience_list_rest_bad_request( - transport: str = "rest", request_type=analytics_data_api.CreateAudienceListRequest +def 
test_query_audience_list_rest_bad_request( + transport: str = "rest", request_type=analytics_data_api.QueryAudienceListRequest ): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7346,7 +8044,7 @@ def test_create_audience_list_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {"name": "properties/sample1/audienceLists/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -7358,10 +8056,10 @@ def test_create_audience_list_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_audience_list(request) + client.query_audience_list(request) -def test_create_audience_list_rest_flattened(): +def test_query_audience_list_rest_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -7370,38 +8068,40 @@ def test_create_audience_list_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = analytics_data_api.QueryAudienceListResponse() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "properties/sample1"} + sample_request = {"name": "properties/sample1/audienceLists/sample2"} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - audience_list=analytics_data_api.AudienceList(name="name_value"), + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analytics_data_api.QueryAudienceListResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_audience_list(**mock_args) + client.query_audience_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/audienceLists" % client.transport._host, + "%s/v1alpha/{name=properties/*/audienceLists/*}:query" + % client.transport._host, args[1], ) -def test_create_audience_list_rest_flattened_error(transport: str = "rest"): +def test_query_audience_list_rest_flattened_error(transport: str = "rest"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7410,14 +8110,13 @@ def test_create_audience_list_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_audience_list( - analytics_data_api.CreateAudienceListRequest(), - parent="parent_value", - audience_list=analytics_data_api.AudienceList(name="name_value"), + client.query_audience_list( + analytics_data_api.QueryAudienceListRequest(), + name="name_value", ) -def test_create_audience_list_rest_error(): +def test_query_audience_list_rest_error(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -7426,11 +8125,11 @@ def test_create_audience_list_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_data_api.QueryAudienceListRequest, + analytics_data_api.SheetExportAudienceListRequest, dict, ], ) -def test_query_audience_list_rest(request_type): +def test_sheet_export_audience_list_rest(request_type): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -7443,7 +8142,9 @@ def test_query_audience_list_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_data_api.QueryAudienceListResponse( + return_value = analytics_data_api.SheetExportAudienceListResponse( + spreadsheet_uri="spreadsheet_uri_value", + spreadsheet_id="spreadsheet_id_value", row_count=992, ) @@ -7451,19 +8152,23 @@ def test_query_audience_list_rest(request_type): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.QueryAudienceListResponse.pb(return_value) + return_value = analytics_data_api.SheetExportAudienceListResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.query_audience_list(request) + response = client.sheet_export_audience_list(request) # Establish that the response is the type that we expect. - assert isinstance(response, analytics_data_api.QueryAudienceListResponse) + assert isinstance(response, analytics_data_api.SheetExportAudienceListResponse) + assert response.spreadsheet_uri == "spreadsheet_uri_value" + assert response.spreadsheet_id == "spreadsheet_id_value" assert response.row_count == 992 -def test_query_audience_list_rest_use_cached_wrapped_rpc(): +def test_sheet_export_audience_list_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -7478,7 +8183,8 @@ def test_query_audience_list_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.query_audience_list in client._transport._wrapped_methods + client._transport.sheet_export_audience_list + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -7487,24 +8193,24 @@ def test_query_audience_list_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute 
client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.query_audience_list + client._transport.sheet_export_audience_list ] = mock_rpc request = {} - client.query_audience_list(request) + client.sheet_export_audience_list(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.query_audience_list(request) + client.sheet_export_audience_list(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_query_audience_list_rest_required_fields( - request_type=analytics_data_api.QueryAudienceListRequest, +def test_sheet_export_audience_list_rest_required_fields( + request_type=analytics_data_api.SheetExportAudienceListRequest, ): transport_class = transports.AlphaAnalyticsDataRestTransport @@ -7520,7 +8226,7 @@ def test_query_audience_list_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).query_audience_list._get_unset_required_fields(jsonified_request) + ).sheet_export_audience_list._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -7529,7 +8235,7 @@ def test_query_audience_list_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).query_audience_list._get_unset_required_fields(jsonified_request) + ).sheet_export_audience_list._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -7543,7 +8249,7 @@ def test_query_audience_list_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = analytics_data_api.QueryAudienceListResponse() + return_value = analytics_data_api.SheetExportAudienceListResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -7565,30 +8271,32 @@ def test_query_audience_list_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.QueryAudienceListResponse.pb(return_value) + return_value = analytics_data_api.SheetExportAudienceListResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.query_audience_list(request) + response = client.sheet_export_audience_list(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_query_audience_list_rest_unset_required_fields(): +def test_sheet_export_audience_list_rest_unset_required_fields(): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.query_audience_list._get_unset_required_fields({}) + unset_fields = transport.sheet_export_audience_list._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_query_audience_list_rest_interceptors(null_interceptor): +def test_sheet_export_audience_list_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -7601,14 +8309,14 @@ def test_query_audience_list_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - 
transports.AlphaAnalyticsDataRestInterceptor, "post_query_audience_list" + transports.AlphaAnalyticsDataRestInterceptor, "post_sheet_export_audience_list" ) as post, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "pre_query_audience_list" + transports.AlphaAnalyticsDataRestInterceptor, "pre_sheet_export_audience_list" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_data_api.QueryAudienceListRequest.pb( - analytics_data_api.QueryAudienceListRequest() + pb_message = analytics_data_api.SheetExportAudienceListRequest.pb( + analytics_data_api.SheetExportAudienceListRequest() ) transcode.return_value = { "method": "post", @@ -7621,20 +8329,20 @@ def test_query_audience_list_rest_interceptors(null_interceptor): req.return_value.status_code = 200 req.return_value.request = PreparedRequest() req.return_value._content = ( - analytics_data_api.QueryAudienceListResponse.to_json( - analytics_data_api.QueryAudienceListResponse() + analytics_data_api.SheetExportAudienceListResponse.to_json( + analytics_data_api.SheetExportAudienceListResponse() ) ) - request = analytics_data_api.QueryAudienceListRequest() + request = analytics_data_api.SheetExportAudienceListRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_data_api.QueryAudienceListResponse() + post.return_value = analytics_data_api.SheetExportAudienceListResponse() - client.query_audience_list( + client.sheet_export_audience_list( request, metadata=[ ("key", "val"), @@ -7646,8 +8354,9 @@ def test_query_audience_list_rest_interceptors(null_interceptor): post.assert_called_once() -def test_query_audience_list_rest_bad_request( - transport: str = "rest", request_type=analytics_data_api.QueryAudienceListRequest +def test_sheet_export_audience_list_rest_bad_request( + transport: str = "rest", + request_type=analytics_data_api.SheetExportAudienceListRequest, ): client = AlphaAnalyticsDataClient( 
credentials=ga_credentials.AnonymousCredentials(), @@ -7667,10 +8376,10 @@ def test_query_audience_list_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.query_audience_list(request) + client.sheet_export_audience_list(request) -def test_query_audience_list_rest_flattened(): +def test_sheet_export_audience_list_rest_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -7679,7 +8388,7 @@ def test_query_audience_list_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_data_api.QueryAudienceListResponse() + return_value = analytics_data_api.SheetExportAudienceListResponse() # get arguments that satisfy an http rule for this method sample_request = {"name": "properties/sample1/audienceLists/sample2"} @@ -7694,25 +8403,27 @@ def test_query_audience_list_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.QueryAudienceListResponse.pb(return_value) + return_value = analytics_data_api.SheetExportAudienceListResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.query_audience_list(**mock_args) + client.sheet_export_audience_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/audienceLists/*}:query" + "%s/v1alpha/{name=properties/*/audienceLists/*}:exportSheet" % client.transport._host, args[1], ) -def test_query_audience_list_rest_flattened_error(transport: str = "rest"): +def test_sheet_export_audience_list_rest_flattened_error(transport: str = "rest"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7721,13 +8432,13 @@ def test_query_audience_list_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.query_audience_list( - analytics_data_api.QueryAudienceListRequest(), + client.sheet_export_audience_list( + analytics_data_api.SheetExportAudienceListRequest(), name="name_value", ) -def test_query_audience_list_rest_error(): +def test_sheet_export_audience_list_rest_error(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -7736,11 +8447,11 @@ def test_query_audience_list_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_data_api.SheetExportAudienceListRequest, + analytics_data_api.GetAudienceListRequest, dict, ], ) -def test_sheet_export_audience_list_rest(request_type): +def test_get_audience_list_rest(request_type): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -7753,33 +8464,43 @@ def test_sheet_export_audience_list_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_data_api.SheetExportAudienceListResponse( - spreadsheet_uri="spreadsheet_uri_value", - spreadsheet_id="spreadsheet_id_value", + return_value = analytics_data_api.AudienceList( + name="name_value", + audience="audience_value", + audience_display_name="audience_display_name_value", + state=analytics_data_api.AudienceList.State.CREATING, + creation_quota_tokens_charged=3070, row_count=992, + error_message="error_message_value", + percentage_completed=0.2106, + recurring_audience_list="recurring_audience_list_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.SheetExportAudienceListResponse.pb( - return_value - ) + return_value = analytics_data_api.AudienceList.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.sheet_export_audience_list(request) + response = client.get_audience_list(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_data_api.SheetExportAudienceListResponse) - assert response.spreadsheet_uri == "spreadsheet_uri_value" - assert response.spreadsheet_id == "spreadsheet_id_value" + assert isinstance(response, analytics_data_api.AudienceList) + assert response.name == "name_value" + assert response.audience == "audience_value" + assert response.audience_display_name == "audience_display_name_value" + assert response.state == analytics_data_api.AudienceList.State.CREATING + assert response.creation_quota_tokens_charged == 3070 assert response.row_count == 992 + assert response.error_message == "error_message_value" + assert math.isclose(response.percentage_completed, 0.2106, rel_tol=1e-6) + assert response.recurring_audience_list == "recurring_audience_list_value" -def test_sheet_export_audience_list_rest_use_cached_wrapped_rpc(): +def test_get_audience_list_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -7793,10 +8514,7 @@ def test_sheet_export_audience_list_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.sheet_export_audience_list - in client._transport._wrapped_methods - ) + assert client._transport.get_audience_list in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -7804,24 +8522,24 @@ def test_sheet_export_audience_list_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.sheet_export_audience_list + client._transport.get_audience_list ] = mock_rpc request = {} - client.sheet_export_audience_list(request) + client.get_audience_list(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.sheet_export_audience_list(request) + client.get_audience_list(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_sheet_export_audience_list_rest_required_fields( - request_type=analytics_data_api.SheetExportAudienceListRequest, +def test_get_audience_list_rest_required_fields( + request_type=analytics_data_api.GetAudienceListRequest, ): transport_class = transports.AlphaAnalyticsDataRestTransport @@ -7837,7 +8555,7 @@ def test_sheet_export_audience_list_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).sheet_export_audience_list._get_unset_required_fields(jsonified_request) + ).get_audience_list._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -7846,7 +8564,7 @@ def test_sheet_export_audience_list_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).sheet_export_audience_list._get_unset_required_fields(jsonified_request) + ).get_audience_list._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -7860,7 +8578,7 @@ def test_sheet_export_audience_list_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_data_api.SheetExportAudienceListResponse() + return_value = analytics_data_api.AudienceList() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -7872,42 +8590,39 @@ def test_sheet_export_audience_list_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.SheetExportAudienceListResponse.pb( - return_value - ) + return_value = analytics_data_api.AudienceList.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.sheet_export_audience_list(request) + response = client.get_audience_list(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_sheet_export_audience_list_rest_unset_required_fields(): +def test_get_audience_list_rest_unset_required_fields(): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.sheet_export_audience_list._get_unset_required_fields({}) + unset_fields = transport.get_audience_list._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_sheet_export_audience_list_rest_interceptors(null_interceptor): +def test_get_audience_list_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -7920,14 +8635,14 @@ def test_sheet_export_audience_list_rest_interceptors(null_interceptor): ) as req, mock.patch.object( 
path_template, "transcode" ) as transcode, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "post_sheet_export_audience_list" + transports.AlphaAnalyticsDataRestInterceptor, "post_get_audience_list" ) as post, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "pre_sheet_export_audience_list" + transports.AlphaAnalyticsDataRestInterceptor, "pre_get_audience_list" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_data_api.SheetExportAudienceListRequest.pb( - analytics_data_api.SheetExportAudienceListRequest() + pb_message = analytics_data_api.GetAudienceListRequest.pb( + analytics_data_api.GetAudienceListRequest() ) transcode.return_value = { "method": "post", @@ -7939,21 +8654,19 @@ def test_sheet_export_audience_list_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - analytics_data_api.SheetExportAudienceListResponse.to_json( - analytics_data_api.SheetExportAudienceListResponse() - ) + req.return_value._content = analytics_data_api.AudienceList.to_json( + analytics_data_api.AudienceList() ) - request = analytics_data_api.SheetExportAudienceListRequest() + request = analytics_data_api.GetAudienceListRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_data_api.SheetExportAudienceListResponse() + post.return_value = analytics_data_api.AudienceList() - client.sheet_export_audience_list( + client.get_audience_list( request, metadata=[ ("key", "val"), @@ -7965,9 +8678,8 @@ def test_sheet_export_audience_list_rest_interceptors(null_interceptor): post.assert_called_once() -def test_sheet_export_audience_list_rest_bad_request( - transport: str = "rest", - request_type=analytics_data_api.SheetExportAudienceListRequest, +def test_get_audience_list_rest_bad_request( + transport: str = "rest", 
request_type=analytics_data_api.GetAudienceListRequest ): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7987,10 +8699,10 @@ def test_sheet_export_audience_list_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.sheet_export_audience_list(request) + client.get_audience_list(request) -def test_sheet_export_audience_list_rest_flattened(): +def test_get_audience_list_rest_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -7999,7 +8711,7 @@ def test_sheet_export_audience_list_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_data_api.SheetExportAudienceListResponse() + return_value = analytics_data_api.AudienceList() # get arguments that satisfy an http rule for this method sample_request = {"name": "properties/sample1/audienceLists/sample2"} @@ -8014,27 +8726,24 @@ def test_sheet_export_audience_list_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.SheetExportAudienceListResponse.pb( - return_value - ) + return_value = analytics_data_api.AudienceList.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.sheet_export_audience_list(**mock_args) + client.get_audience_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/audienceLists/*}:exportSheet" - % client.transport._host, + "%s/v1alpha/{name=properties/*/audienceLists/*}" % client.transport._host, args[1], ) -def test_sheet_export_audience_list_rest_flattened_error(transport: str = "rest"): +def test_get_audience_list_rest_flattened_error(transport: str = "rest"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8043,13 +8752,13 @@ def test_sheet_export_audience_list_rest_flattened_error(transport: str = "rest" # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.sheet_export_audience_list( - analytics_data_api.SheetExportAudienceListRequest(), + client.get_audience_list( + analytics_data_api.GetAudienceListRequest(), name="name_value", ) -def test_sheet_export_audience_list_rest_error(): +def test_get_audience_list_rest_error(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -8058,60 +8767,44 @@ def test_sheet_export_audience_list_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_data_api.GetAudienceListRequest, + analytics_data_api.ListAudienceListsRequest, dict, ], ) -def test_get_audience_list_rest(request_type): +def test_list_audience_lists_rest(request_type): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/audienceLists/sample2"} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_data_api.AudienceList( - name="name_value", - audience="audience_value", - audience_display_name="audience_display_name_value", - state=analytics_data_api.AudienceList.State.CREATING, - creation_quota_tokens_charged=3070, - row_count=992, - error_message="error_message_value", - percentage_completed=0.2106, - recurring_audience_list="recurring_audience_list_value", + return_value = analytics_data_api.ListAudienceListsResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.AudienceList.pb(return_value) + return_value = analytics_data_api.ListAudienceListsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_audience_list(request) + response = client.list_audience_lists(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_data_api.AudienceList) - assert response.name == "name_value" - assert response.audience == "audience_value" - assert response.audience_display_name == "audience_display_name_value" - assert response.state == analytics_data_api.AudienceList.State.CREATING - assert response.creation_quota_tokens_charged == 3070 - assert response.row_count == 992 - assert response.error_message == "error_message_value" - assert math.isclose(response.percentage_completed, 0.2106, rel_tol=1e-6) - assert response.recurring_audience_list == "recurring_audience_list_value" + assert isinstance(response, pagers.ListAudienceListsPager) + assert response.next_page_token == "next_page_token_value" -def test_get_audience_list_rest_use_cached_wrapped_rpc(): +def test_list_audience_lists_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -8125,7 +8818,9 @@ def test_get_audience_list_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_audience_list in client._transport._wrapped_methods + assert ( + client._transport.list_audience_lists in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -8133,29 +8828,29 @@ def test_get_audience_list_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_audience_list + client._transport.list_audience_lists ] = mock_rpc request = {} - client.get_audience_list(request) + client.list_audience_lists(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_audience_list(request) + client.list_audience_lists(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_audience_list_rest_required_fields( - request_type=analytics_data_api.GetAudienceListRequest, +def test_list_audience_lists_rest_required_fields( + request_type=analytics_data_api.ListAudienceListsRequest, ): transport_class = transports.AlphaAnalyticsDataRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -8166,21 +8861,28 @@ def test_get_audience_list_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_audience_list._get_unset_required_fields(jsonified_request) + ).list_audience_lists._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_audience_list._get_unset_required_fields(jsonified_request) + ).list_audience_lists._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8189,7 +8891,7 @@ def test_get_audience_list_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_data_api.AudienceList() + return_value = analytics_data_api.ListAudienceListsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -8210,30 +8912,38 @@ def test_get_audience_list_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.AudienceList.pb(return_value) + return_value = analytics_data_api.ListAudienceListsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_audience_list(request) + response = client.list_audience_lists(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_audience_list_rest_unset_required_fields(): +def test_list_audience_lists_rest_unset_required_fields(): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_audience_list._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = 
transport.list_audience_lists._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_audience_list_rest_interceptors(null_interceptor): +def test_list_audience_lists_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -8246,14 +8956,14 @@ def test_get_audience_list_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "post_get_audience_list" + transports.AlphaAnalyticsDataRestInterceptor, "post_list_audience_lists" ) as post, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "pre_get_audience_list" + transports.AlphaAnalyticsDataRestInterceptor, "pre_list_audience_lists" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_data_api.GetAudienceListRequest.pb( - analytics_data_api.GetAudienceListRequest() + pb_message = analytics_data_api.ListAudienceListsRequest.pb( + analytics_data_api.ListAudienceListsRequest() ) transcode.return_value = { "method": "post", @@ -8265,19 +8975,21 @@ def test_get_audience_list_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = analytics_data_api.AudienceList.to_json( - analytics_data_api.AudienceList() + req.return_value._content = ( + analytics_data_api.ListAudienceListsResponse.to_json( + analytics_data_api.ListAudienceListsResponse() + ) ) - request = analytics_data_api.GetAudienceListRequest() + request = analytics_data_api.ListAudienceListsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = 
analytics_data_api.AudienceList() + post.return_value = analytics_data_api.ListAudienceListsResponse() - client.get_audience_list( + client.list_audience_lists( request, metadata=[ ("key", "val"), @@ -8289,8 +9001,8 @@ def test_get_audience_list_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_audience_list_rest_bad_request( - transport: str = "rest", request_type=analytics_data_api.GetAudienceListRequest +def test_list_audience_lists_rest_bad_request( + transport: str = "rest", request_type=analytics_data_api.ListAudienceListsRequest ): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8298,7 +9010,7 @@ def test_get_audience_list_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/audienceLists/sample2"} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -8310,10 +9022,10 @@ def test_get_audience_list_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_audience_list(request) + client.list_audience_lists(request) -def test_get_audience_list_rest_flattened(): +def test_list_audience_lists_rest_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -8322,14 +9034,14 @@ def test_get_audience_list_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_data_api.AudienceList() + return_value = analytics_data_api.ListAudienceListsResponse() # get arguments that satisfy an http rule for this method - sample_request = {"name": "properties/sample1/audienceLists/sample2"} + sample_request = {"parent": "properties/sample1"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -8337,24 +9049,24 @@ def test_get_audience_list_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.AudienceList.pb(return_value) + return_value = analytics_data_api.ListAudienceListsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_audience_list(**mock_args) + client.list_audience_lists(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/audienceLists/*}" % client.transport._host, + "%s/v1alpha/{parent=properties/*}/audienceLists" % client.transport._host, args[1], ) -def test_get_audience_list_rest_flattened_error(transport: str = "rest"): +def test_list_audience_lists_rest_flattened_error(transport: str = "rest"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8363,26 +9075,83 @@ def test_get_audience_list_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_audience_list( - analytics_data_api.GetAudienceListRequest(), - name="name_value", + client.list_audience_lists( + analytics_data_api.ListAudienceListsRequest(), + parent="parent_value", ) -def test_get_audience_list_rest_error(): +def test_list_audience_lists_rest_pager(transport: str = "rest"): client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + analytics_data_api.ListAudienceListsResponse( + audience_lists=[ + analytics_data_api.AudienceList(), + analytics_data_api.AudienceList(), + analytics_data_api.AudienceList(), + ], + next_page_token="abc", + ), + analytics_data_api.ListAudienceListsResponse( + audience_lists=[], + next_page_token="def", + ), + analytics_data_api.ListAudienceListsResponse( + audience_lists=[ + analytics_data_api.AudienceList(), + ], + next_page_token="ghi", + ), + analytics_data_api.ListAudienceListsResponse( + audience_lists=[ + analytics_data_api.AudienceList(), + analytics_data_api.AudienceList(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + analytics_data_api.ListAudienceListsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "properties/sample1"} + + pager = 
client.list_audience_lists(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, analytics_data_api.AudienceList) for i in results) + + pages = list(client.list_audience_lists(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - analytics_data_api.ListAudienceListsRequest, + analytics_data_api.CreateRecurringAudienceListRequest, dict, ], ) -def test_list_audience_lists_rest(request_type): +def test_create_recurring_audience_list_rest(request_type): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -8390,32 +9159,123 @@ def test_list_audience_lists_rest(request_type): # send a request that will satisfy transcoding request_init = {"parent": "properties/sample1"} + request_init["recurring_audience_list"] = { + "name": "name_value", + "audience": "audience_value", + "audience_display_name": "audience_display_name_value", + "dimensions": [{"dimension_name": "dimension_name_value"}], + "active_days_remaining": 2213, + "audience_lists": ["audience_lists_value1", "audience_lists_value2"], + "webhook_notification": { + "uri": "uri_value", + "channel_token": "channel_token_value", + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_data_api.CreateRecurringAudienceListRequest.meta.fields[ + "recurring_audience_list" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "recurring_audience_list" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["recurring_audience_list"][field])): + del 
request_init["recurring_audience_list"][field][i][subfield] + else: + del request_init["recurring_audience_list"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_data_api.ListAudienceListsResponse( - next_page_token="next_page_token_value", + return_value = analytics_data_api.RecurringAudienceList( + name="name_value", + audience="audience_value", + audience_display_name="audience_display_name_value", + active_days_remaining=2213, + audience_lists=["audience_lists_value"], ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.ListAudienceListsResponse.pb(return_value) + return_value = analytics_data_api.RecurringAudienceList.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_audience_lists(request) + response = client.create_recurring_audience_list(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListAudienceListsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, analytics_data_api.RecurringAudienceList) + assert response.name == "name_value" + assert response.audience == "audience_value" + assert response.audience_display_name == "audience_display_name_value" + assert response.active_days_remaining == 2213 + assert response.audience_lists == ["audience_lists_value"] -def test_list_audience_lists_rest_use_cached_wrapped_rpc(): +def test_create_recurring_audience_list_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -8430,7 +9290,8 @@ def test_list_audience_lists_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_audience_lists in client._transport._wrapped_methods + client._transport.create_recurring_audience_list + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -8439,24 +9300,24 @@ def test_list_audience_lists_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_audience_lists + client._transport.create_recurring_audience_list ] = mock_rpc request = {} - client.list_audience_lists(request) + client.create_recurring_audience_list(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_audience_lists(request) + client.create_recurring_audience_list(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_audience_lists_rest_required_fields( - request_type=analytics_data_api.ListAudienceListsRequest, +def test_create_recurring_audience_list_rest_required_fields( + request_type=analytics_data_api.CreateRecurringAudienceListRequest, ): transport_class = transports.AlphaAnalyticsDataRestTransport @@ -8472,7 +9333,7 @@ def test_list_audience_lists_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_audience_lists._get_unset_required_fields(jsonified_request) + ).create_recurring_audience_list._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -8481,14 +9342,7 @@ def test_list_audience_lists_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_audience_lists._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) + ).create_recurring_audience_list._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -8502,7 +9356,7 @@ def test_list_audience_lists_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_data_api.ListAudienceListsResponse() + return_value = analytics_data_api.RecurringAudienceList() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -8514,47 +9368,50 @@ def test_list_audience_lists_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.ListAudienceListsResponse.pb(return_value) + return_value = analytics_data_api.RecurringAudienceList.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_audience_lists(request) + response = client.create_recurring_audience_list(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_audience_lists_rest_unset_required_fields(): +def test_create_recurring_audience_list_rest_unset_required_fields(): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_audience_lists._get_unset_required_fields({}) + unset_fields = transport.create_recurring_audience_list._get_unset_required_fields( + {} + ) assert set(unset_fields) == ( - set( + set(()) + & set( ( - "pageSize", - "pageToken", + "parent", + "recurringAudienceList", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_audience_lists_rest_interceptors(null_interceptor): +def test_create_recurring_audience_list_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -8567,14 +9424,16 
@@ def test_list_audience_lists_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "post_list_audience_lists" + transports.AlphaAnalyticsDataRestInterceptor, + "post_create_recurring_audience_list", ) as post, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "pre_list_audience_lists" + transports.AlphaAnalyticsDataRestInterceptor, + "pre_create_recurring_audience_list", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_data_api.ListAudienceListsRequest.pb( - analytics_data_api.ListAudienceListsRequest() + pb_message = analytics_data_api.CreateRecurringAudienceListRequest.pb( + analytics_data_api.CreateRecurringAudienceListRequest() ) transcode.return_value = { "method": "post", @@ -8586,21 +9445,19 @@ def test_list_audience_lists_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - analytics_data_api.ListAudienceListsResponse.to_json( - analytics_data_api.ListAudienceListsResponse() - ) + req.return_value._content = analytics_data_api.RecurringAudienceList.to_json( + analytics_data_api.RecurringAudienceList() ) - request = analytics_data_api.ListAudienceListsRequest() + request = analytics_data_api.CreateRecurringAudienceListRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_data_api.ListAudienceListsResponse() + post.return_value = analytics_data_api.RecurringAudienceList() - client.list_audience_lists( + client.create_recurring_audience_list( request, metadata=[ ("key", "val"), @@ -8612,8 +9469,9 @@ def test_list_audience_lists_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_audience_lists_rest_bad_request( - transport: str = "rest", 
request_type=analytics_data_api.ListAudienceListsRequest +def test_create_recurring_audience_list_rest_bad_request( + transport: str = "rest", + request_type=analytics_data_api.CreateRecurringAudienceListRequest, ): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8633,10 +9491,10 @@ def test_list_audience_lists_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_audience_lists(request) + client.create_recurring_audience_list(request) -def test_list_audience_lists_rest_flattened(): +def test_create_recurring_audience_list_rest_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -8645,7 +9503,7 @@ def test_list_audience_lists_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_data_api.ListAudienceListsResponse() + return_value = analytics_data_api.RecurringAudienceList() # get arguments that satisfy an http rule for this method sample_request = {"parent": "properties/sample1"} @@ -8653,6 +9511,9 @@ def test_list_audience_lists_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", + recurring_audience_list=analytics_data_api.RecurringAudienceList( + name="name_value" + ), ) mock_args.update(sample_request) @@ -8660,24 +9521,25 @@ def test_list_audience_lists_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.ListAudienceListsResponse.pb(return_value) + return_value = analytics_data_api.RecurringAudienceList.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_audience_lists(**mock_args) + client.create_recurring_audience_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/audienceLists" % client.transport._host, + "%s/v1alpha/{parent=properties/*}/recurringAudienceLists" + % client.transport._host, args[1], ) -def test_list_audience_lists_rest_flattened_error(transport: str = "rest"): +def test_create_recurring_audience_list_rest_flattened_error(transport: str = "rest"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8686,173 +9548,36 @@ def test_list_audience_lists_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_audience_lists( - analytics_data_api.ListAudienceListsRequest(), + client.create_recurring_audience_list( + analytics_data_api.CreateRecurringAudienceListRequest(), parent="parent_value", - ) - - -def test_list_audience_lists_rest_pager(transport: str = "rest"): - client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - analytics_data_api.ListAudienceListsResponse( - audience_lists=[ - analytics_data_api.AudienceList(), - analytics_data_api.AudienceList(), - analytics_data_api.AudienceList(), - ], - next_page_token="abc", - ), - analytics_data_api.ListAudienceListsResponse( - audience_lists=[], - next_page_token="def", - ), - analytics_data_api.ListAudienceListsResponse( - audience_lists=[ - analytics_data_api.AudienceList(), - ], - next_page_token="ghi", - ), - analytics_data_api.ListAudienceListsResponse( - audience_lists=[ - analytics_data_api.AudienceList(), - analytics_data_api.AudienceList(), - ], + recurring_audience_list=analytics_data_api.RecurringAudienceList( + name="name_value" ), ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - analytics_data_api.ListAudienceListsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "properties/sample1"} - - pager = 
client.list_audience_lists(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, analytics_data_api.AudienceList) for i in results) - - pages = list(client.list_audience_lists(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - analytics_data_api.CreateRecurringAudienceListRequest, - dict, - ], -) -def test_create_recurring_audience_list_rest(request_type): - client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} - request_init["recurring_audience_list"] = { - "name": "name_value", - "audience": "audience_value", - "audience_display_name": "audience_display_name_value", - "dimensions": [{"dimension_name": "dimension_name_value"}], - "active_days_remaining": 2213, - "audience_lists": ["audience_lists_value1", "audience_lists_value2"], - "webhook_notification": { - "uri": "uri_value", - "channel_token": "channel_token_value", - }, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = analytics_data_api.CreateRecurringAudienceListRequest.meta.fields[ - "recurring_audience_list" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "recurring_audience_list" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value +def test_create_recurring_audience_list_rest_error(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = 
subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["recurring_audience_list"][field])): - del request_init["recurring_audience_list"][field][i][subfield] - else: - del request_init["recurring_audience_list"][field][subfield] +@pytest.mark.parametrize( + "request_type", + [ + analytics_data_api.GetRecurringAudienceListRequest, + dict, + ], +) +def test_get_recurring_audience_list_rest(request_type): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "properties/sample1/recurringAudienceLists/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -8875,7 +9600,7 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_recurring_audience_list(request) + response = client.get_recurring_audience_list(request) # Establish that the response is the type that we expect. 
assert isinstance(response, analytics_data_api.RecurringAudienceList) @@ -8886,7 +9611,7 @@ def get_message_fields(field): assert response.audience_lists == ["audience_lists_value"] -def test_create_recurring_audience_list_rest_use_cached_wrapped_rpc(): +def test_get_recurring_audience_list_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -8901,7 +9626,7 @@ def test_create_recurring_audience_list_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_recurring_audience_list + client._transport.get_recurring_audience_list in client._transport._wrapped_methods ) @@ -8911,29 +9636,29 @@ def test_create_recurring_audience_list_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_recurring_audience_list + client._transport.get_recurring_audience_list ] = mock_rpc request = {} - client.create_recurring_audience_list(request) + client.get_recurring_audience_list(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_recurring_audience_list(request) + client.get_recurring_audience_list(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_recurring_audience_list_rest_required_fields( - request_type=analytics_data_api.CreateRecurringAudienceListRequest, +def test_get_recurring_audience_list_rest_required_fields( + request_type=analytics_data_api.GetRecurringAudienceListRequest, ): transport_class = transports.AlphaAnalyticsDataRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -8944,21 +9669,21 @@ def test_create_recurring_audience_list_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_recurring_audience_list._get_unset_required_fields(jsonified_request) + ).get_recurring_audience_list._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_recurring_audience_list._get_unset_required_fields(jsonified_request) + ).get_recurring_audience_list._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8979,10 +9704,9 @@ def test_create_recurring_audience_list_rest_required_fields( pb_request = 
request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -8995,34 +9719,24 @@ def test_create_recurring_audience_list_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_recurring_audience_list(request) + response = client.get_recurring_audience_list(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_recurring_audience_list_rest_unset_required_fields(): +def test_get_recurring_audience_list_rest_unset_required_fields(): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_recurring_audience_list._get_unset_required_fields( - {} - ) - assert set(unset_fields) == ( - set(()) - & set( - ( - "parent", - "recurringAudienceList", - ) - ) - ) + unset_fields = transport.get_recurring_audience_list._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_recurring_audience_list_rest_interceptors(null_interceptor): +def test_get_recurring_audience_list_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -9035,16 +9749,14 @@ def test_create_recurring_audience_list_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, - "post_create_recurring_audience_list", + transports.AlphaAnalyticsDataRestInterceptor, "post_get_recurring_audience_list" ) as post, 
mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, - "pre_create_recurring_audience_list", + transports.AlphaAnalyticsDataRestInterceptor, "pre_get_recurring_audience_list" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_data_api.CreateRecurringAudienceListRequest.pb( - analytics_data_api.CreateRecurringAudienceListRequest() + pb_message = analytics_data_api.GetRecurringAudienceListRequest.pb( + analytics_data_api.GetRecurringAudienceListRequest() ) transcode.return_value = { "method": "post", @@ -9060,7 +9772,7 @@ def test_create_recurring_audience_list_rest_interceptors(null_interceptor): analytics_data_api.RecurringAudienceList() ) - request = analytics_data_api.CreateRecurringAudienceListRequest() + request = analytics_data_api.GetRecurringAudienceListRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -9068,7 +9780,7 @@ def test_create_recurring_audience_list_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = analytics_data_api.RecurringAudienceList() - client.create_recurring_audience_list( + client.get_recurring_audience_list( request, metadata=[ ("key", "val"), @@ -9080,9 +9792,9 @@ def test_create_recurring_audience_list_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_recurring_audience_list_rest_bad_request( +def test_get_recurring_audience_list_rest_bad_request( transport: str = "rest", - request_type=analytics_data_api.CreateRecurringAudienceListRequest, + request_type=analytics_data_api.GetRecurringAudienceListRequest, ): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9090,7 +9802,7 @@ def test_create_recurring_audience_list_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {"name": "properties/sample1/recurringAudienceLists/sample2"} request = request_type(**request_init) # Mock the http 
request call within the method and fake a BadRequest error. @@ -9102,10 +9814,10 @@ def test_create_recurring_audience_list_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_recurring_audience_list(request) + client.get_recurring_audience_list(request) -def test_create_recurring_audience_list_rest_flattened(): +def test_get_recurring_audience_list_rest_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -9117,14 +9829,11 @@ def test_create_recurring_audience_list_rest_flattened(): return_value = analytics_data_api.RecurringAudienceList() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "properties/sample1"} + sample_request = {"name": "properties/sample1/recurringAudienceLists/sample2"} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - recurring_audience_list=analytics_data_api.RecurringAudienceList( - name="name_value" - ), + name="name_value", ) mock_args.update(sample_request) @@ -9137,20 +9846,20 @@ def test_create_recurring_audience_list_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_recurring_audience_list(**mock_args) + client.get_recurring_audience_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/recurringAudienceLists" + "%s/v1alpha/{name=properties/*/recurringAudienceLists/*}" % client.transport._host, args[1], ) -def test_create_recurring_audience_list_rest_flattened_error(transport: str = "rest"): +def test_get_recurring_audience_list_rest_flattened_error(transport: str = "rest"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9159,16 +9868,13 @@ def test_create_recurring_audience_list_rest_flattened_error(transport: str = "r # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_recurring_audience_list( - analytics_data_api.CreateRecurringAudienceListRequest(), - parent="parent_value", - recurring_audience_list=analytics_data_api.RecurringAudienceList( - name="name_value" - ), + client.get_recurring_audience_list( + analytics_data_api.GetRecurringAudienceListRequest(), + name="name_value", ) -def test_create_recurring_audience_list_rest_error(): +def test_get_recurring_audience_list_rest_error(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -9177,52 +9883,46 @@ def test_create_recurring_audience_list_rest_error(): @pytest.mark.parametrize( "request_type", [ - analytics_data_api.GetRecurringAudienceListRequest, + analytics_data_api.ListRecurringAudienceListsRequest, dict, ], ) -def test_get_recurring_audience_list_rest(request_type): +def test_list_recurring_audience_lists_rest(request_type): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/recurringAudienceLists/sample2"} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock 
the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_data_api.RecurringAudienceList( - name="name_value", - audience="audience_value", - audience_display_name="audience_display_name_value", - active_days_remaining=2213, - audience_lists=["audience_lists_value"], + return_value = analytics_data_api.ListRecurringAudienceListsResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.RecurringAudienceList.pb(return_value) + return_value = analytics_data_api.ListRecurringAudienceListsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_recurring_audience_list(request) + response = client.list_recurring_audience_lists(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_data_api.RecurringAudienceList) - assert response.name == "name_value" - assert response.audience == "audience_value" - assert response.audience_display_name == "audience_display_name_value" - assert response.active_days_remaining == 2213 - assert response.audience_lists == ["audience_lists_value"] + assert isinstance(response, pagers.ListRecurringAudienceListsPager) + assert response.next_page_token == "next_page_token_value" -def test_get_recurring_audience_list_rest_use_cached_wrapped_rpc(): +def test_list_recurring_audience_lists_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -9237,7 +9937,7 @@ def test_get_recurring_audience_list_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_recurring_audience_list + client._transport.list_recurring_audience_lists in client._transport._wrapped_methods ) @@ -9247,29 +9947,29 @@ def test_get_recurring_audience_list_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_recurring_audience_list + client._transport.list_recurring_audience_lists ] = mock_rpc request = {} - client.get_recurring_audience_list(request) + client.list_recurring_audience_lists(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_recurring_audience_list(request) + client.list_recurring_audience_lists(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_recurring_audience_list_rest_required_fields( - request_type=analytics_data_api.GetRecurringAudienceListRequest, +def test_list_recurring_audience_lists_rest_required_fields( + request_type=analytics_data_api.ListRecurringAudienceListsRequest, ): transport_class = transports.AlphaAnalyticsDataRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -9280,21 +9980,28 @@ def test_get_recurring_audience_list_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_recurring_audience_list._get_unset_required_fields(jsonified_request) + ).list_recurring_audience_lists._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_recurring_audience_list._get_unset_required_fields(jsonified_request) + ).list_recurring_audience_lists._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9303,7 +10010,7 @@ def test_get_recurring_audience_list_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_data_api.RecurringAudienceList() + return_value = analytics_data_api.ListRecurringAudienceListsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -9324,30 +10031,42 @@ def test_get_recurring_audience_list_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.RecurringAudienceList.pb(return_value) + return_value = analytics_data_api.ListRecurringAudienceListsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_recurring_audience_list(request) + response = client.list_recurring_audience_lists(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_recurring_audience_list_rest_unset_required_fields(): +def test_list_recurring_audience_lists_rest_unset_required_fields(): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = 
transport.get_recurring_audience_list._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.list_recurring_audience_lists._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_recurring_audience_list_rest_interceptors(null_interceptor): +def test_list_recurring_audience_lists_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -9360,14 +10079,16 @@ def test_get_recurring_audience_list_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "post_get_recurring_audience_list" + transports.AlphaAnalyticsDataRestInterceptor, + "post_list_recurring_audience_lists", ) as post, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "pre_get_recurring_audience_list" + transports.AlphaAnalyticsDataRestInterceptor, + "pre_list_recurring_audience_lists", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_data_api.GetRecurringAudienceListRequest.pb( - analytics_data_api.GetRecurringAudienceListRequest() + pb_message = analytics_data_api.ListRecurringAudienceListsRequest.pb( + analytics_data_api.ListRecurringAudienceListsRequest() ) transcode.return_value = { "method": "post", @@ -9379,19 +10100,21 @@ def test_get_recurring_audience_list_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = analytics_data_api.RecurringAudienceList.to_json( - analytics_data_api.RecurringAudienceList() + req.return_value._content = ( + 
analytics_data_api.ListRecurringAudienceListsResponse.to_json( + analytics_data_api.ListRecurringAudienceListsResponse() + ) ) - request = analytics_data_api.GetRecurringAudienceListRequest() + request = analytics_data_api.ListRecurringAudienceListsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_data_api.RecurringAudienceList() + post.return_value = analytics_data_api.ListRecurringAudienceListsResponse() - client.get_recurring_audience_list( + client.list_recurring_audience_lists( request, metadata=[ ("key", "val"), @@ -9403,9 +10126,9 @@ def test_get_recurring_audience_list_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_recurring_audience_list_rest_bad_request( +def test_list_recurring_audience_lists_rest_bad_request( transport: str = "rest", - request_type=analytics_data_api.GetRecurringAudienceListRequest, + request_type=analytics_data_api.ListRecurringAudienceListsRequest, ): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9413,7 +10136,7 @@ def test_get_recurring_audience_list_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/recurringAudienceLists/sample2"} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -9425,10 +10148,10 @@ def test_get_recurring_audience_list_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_recurring_audience_list(request) + client.list_recurring_audience_lists(request) -def test_get_recurring_audience_list_rest_flattened(): +def test_list_recurring_audience_lists_rest_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -9437,14 +10160,14 @@ def test_get_recurring_audience_list_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_data_api.RecurringAudienceList() + return_value = analytics_data_api.ListRecurringAudienceListsResponse() # get arguments that satisfy an http rule for this method - sample_request = {"name": "properties/sample1/recurringAudienceLists/sample2"} + sample_request = {"parent": "properties/sample1"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -9452,25 +10175,27 @@ def test_get_recurring_audience_list_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.RecurringAudienceList.pb(return_value) + return_value = analytics_data_api.ListRecurringAudienceListsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_recurring_audience_list(**mock_args) + client.list_recurring_audience_lists(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/recurringAudienceLists/*}" + "%s/v1alpha/{parent=properties/*}/recurringAudienceLists" % client.transport._host, args[1], ) -def test_get_recurring_audience_list_rest_flattened_error(transport: str = "rest"): +def test_list_recurring_audience_lists_rest_flattened_error(transport: str = "rest"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9479,61 +10204,119 @@ def test_get_recurring_audience_list_rest_flattened_error(transport: str = "rest # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_recurring_audience_list( - analytics_data_api.GetRecurringAudienceListRequest(), - name="name_value", + client.list_recurring_audience_lists( + analytics_data_api.ListRecurringAudienceListsRequest(), + parent="parent_value", ) -def test_get_recurring_audience_list_rest_error(): +def test_list_recurring_audience_lists_rest_pager(transport: str = "rest"): client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[ + analytics_data_api.RecurringAudienceList(), + analytics_data_api.RecurringAudienceList(), + analytics_data_api.RecurringAudienceList(), + ], + next_page_token="abc", + ), + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[], + next_page_token="def", + ), + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[ + analytics_data_api.RecurringAudienceList(), + ], + next_page_token="ghi", + ), + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[ + analytics_data_api.RecurringAudienceList(), + analytics_data_api.RecurringAudienceList(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + analytics_data_api.ListRecurringAudienceListsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "properties/sample1"} + + pager = client.list_recurring_audience_lists(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, analytics_data_api.RecurringAudienceList) for i in results + ) + + pages = list(client.list_recurring_audience_lists(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - analytics_data_api.ListRecurringAudienceListsRequest, + analytics_data_api.GetPropertyQuotasSnapshotRequest, dict, ], ) -def 
test_list_recurring_audience_lists_rest(request_type): +def test_get_property_quotas_snapshot_rest(request_type): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {"name": "properties/sample1/propertyQuotasSnapshot"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_data_api.ListRecurringAudienceListsResponse( - next_page_token="next_page_token_value", + return_value = analytics_data_api.PropertyQuotasSnapshot( + name="name_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.ListRecurringAudienceListsResponse.pb( - return_value - ) + return_value = analytics_data_api.PropertyQuotasSnapshot.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_recurring_audience_lists(request) + response = client.get_property_quotas_snapshot(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListRecurringAudienceListsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, analytics_data_api.PropertyQuotasSnapshot) + assert response.name == "name_value" -def test_list_recurring_audience_lists_rest_use_cached_wrapped_rpc(): +def test_get_property_quotas_snapshot_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -9548,7 +10331,7 @@ def test_list_recurring_audience_lists_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_recurring_audience_lists + client._transport.get_property_quotas_snapshot in client._transport._wrapped_methods ) @@ -9558,29 +10341,29 @@ def test_list_recurring_audience_lists_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_recurring_audience_lists + client._transport.get_property_quotas_snapshot ] = mock_rpc request = {} - client.list_recurring_audience_lists(request) + client.get_property_quotas_snapshot(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_recurring_audience_lists(request) + client.get_property_quotas_snapshot(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_recurring_audience_lists_rest_required_fields( - request_type=analytics_data_api.ListRecurringAudienceListsRequest, +def test_get_property_quotas_snapshot_rest_required_fields( + request_type=analytics_data_api.GetPropertyQuotasSnapshotRequest, ): transport_class = transports.AlphaAnalyticsDataRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -9591,28 +10374,21 @@ def test_list_recurring_audience_lists_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_recurring_audience_lists._get_unset_required_fields(jsonified_request) + ).get_property_quotas_snapshot._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_recurring_audience_lists._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) + ).get_property_quotas_snapshot._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9621,7 +10397,7 @@ def test_list_recurring_audience_lists_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = analytics_data_api.ListRecurringAudienceListsResponse() + return_value = analytics_data_api.PropertyQuotasSnapshot() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -9642,42 +10418,30 @@ def test_list_recurring_audience_lists_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.ListRecurringAudienceListsResponse.pb( - return_value - ) + return_value = analytics_data_api.PropertyQuotasSnapshot.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_recurring_audience_lists(request) + response = client.get_property_quotas_snapshot(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_recurring_audience_lists_rest_unset_required_fields(): +def test_get_property_quotas_snapshot_rest_unset_required_fields(): transport = transports.AlphaAnalyticsDataRestTransport( 
credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_recurring_audience_lists._get_unset_required_fields( - {} - ) - assert set(unset_fields) == ( - set( - ( - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.get_property_quotas_snapshot._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_recurring_audience_lists_rest_interceptors(null_interceptor): +def test_get_property_quotas_snapshot_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -9691,15 +10455,14 @@ def test_list_recurring_audience_lists_rest_interceptors(null_interceptor): path_template, "transcode" ) as transcode, mock.patch.object( transports.AlphaAnalyticsDataRestInterceptor, - "post_list_recurring_audience_lists", + "post_get_property_quotas_snapshot", ) as post, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, - "pre_list_recurring_audience_lists", + transports.AlphaAnalyticsDataRestInterceptor, "pre_get_property_quotas_snapshot" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = analytics_data_api.ListRecurringAudienceListsRequest.pb( - analytics_data_api.ListRecurringAudienceListsRequest() + pb_message = analytics_data_api.GetPropertyQuotasSnapshotRequest.pb( + analytics_data_api.GetPropertyQuotasSnapshotRequest() ) transcode.return_value = { "method": "post", @@ -9711,21 +10474,19 @@ def test_list_recurring_audience_lists_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = ( - analytics_data_api.ListRecurringAudienceListsResponse.to_json( - analytics_data_api.ListRecurringAudienceListsResponse() - ) + req.return_value._content = 
analytics_data_api.PropertyQuotasSnapshot.to_json( + analytics_data_api.PropertyQuotasSnapshot() ) - request = analytics_data_api.ListRecurringAudienceListsRequest() + request = analytics_data_api.GetPropertyQuotasSnapshotRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_data_api.ListRecurringAudienceListsResponse() + post.return_value = analytics_data_api.PropertyQuotasSnapshot() - client.list_recurring_audience_lists( + client.get_property_quotas_snapshot( request, metadata=[ ("key", "val"), @@ -9737,9 +10498,9 @@ def test_list_recurring_audience_lists_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_recurring_audience_lists_rest_bad_request( +def test_get_property_quotas_snapshot_rest_bad_request( transport: str = "rest", - request_type=analytics_data_api.ListRecurringAudienceListsRequest, + request_type=analytics_data_api.GetPropertyQuotasSnapshotRequest, ): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9747,7 +10508,7 @@ def test_list_recurring_audience_lists_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {"name": "properties/sample1/propertyQuotasSnapshot"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -9759,10 +10520,10 @@ def test_list_recurring_audience_lists_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_recurring_audience_lists(request) + client.get_property_quotas_snapshot(request) -def test_list_recurring_audience_lists_rest_flattened(): +def test_get_property_quotas_snapshot_rest_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -9771,14 +10532,14 @@ def test_list_recurring_audience_lists_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_data_api.ListRecurringAudienceListsResponse() + return_value = analytics_data_api.PropertyQuotasSnapshot() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "properties/sample1"} + sample_request = {"name": "properties/sample1/propertyQuotasSnapshot"} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) @@ -9786,27 +10547,25 @@ def test_list_recurring_audience_lists_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.ListRecurringAudienceListsResponse.pb( - return_value - ) + return_value = analytics_data_api.PropertyQuotasSnapshot.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_recurring_audience_lists(**mock_args) + client.get_property_quotas_snapshot(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=properties/*}/recurringAudienceLists" + "%s/v1alpha/{name=properties/*/propertyQuotasSnapshot}" % client.transport._host, args[1], ) -def test_list_recurring_audience_lists_rest_flattened_error(transport: str = "rest"): +def test_get_property_quotas_snapshot_rest_flattened_error(transport: str = "rest"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9815,77 +10574,17 @@ def test_list_recurring_audience_lists_rest_flattened_error(transport: str = "re # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_recurring_audience_lists( - analytics_data_api.ListRecurringAudienceListsRequest(), - parent="parent_value", + client.get_property_quotas_snapshot( + analytics_data_api.GetPropertyQuotasSnapshotRequest(), + name="name_value", ) -def test_list_recurring_audience_lists_rest_pager(transport: str = "rest"): +def test_get_property_quotas_snapshot_rest_error(): client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - analytics_data_api.ListRecurringAudienceListsResponse( - recurring_audience_lists=[ - analytics_data_api.RecurringAudienceList(), - analytics_data_api.RecurringAudienceList(), - analytics_data_api.RecurringAudienceList(), - ], - next_page_token="abc", - ), - analytics_data_api.ListRecurringAudienceListsResponse( - recurring_audience_lists=[], - next_page_token="def", - ), - analytics_data_api.ListRecurringAudienceListsResponse( - recurring_audience_lists=[ - analytics_data_api.RecurringAudienceList(), - ], - next_page_token="ghi", - ), - analytics_data_api.ListRecurringAudienceListsResponse( - recurring_audience_lists=[ - analytics_data_api.RecurringAudienceList(), - analytics_data_api.RecurringAudienceList(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - analytics_data_api.ListRecurringAudienceListsResponse.to_json(x) - for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "properties/sample1"} - - pager = client.list_recurring_audience_lists(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all( - isinstance(i, analytics_data_api.RecurringAudienceList) for i in results - ) - - pages = list(client.list_recurring_audience_lists(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", @@ -11498,6 +12197,7 @@ def test_alpha_analytics_data_base_transport(): "create_recurring_audience_list", "get_recurring_audience_list", 
"list_recurring_audience_lists", + "get_property_quotas_snapshot", "create_report_task", "query_report_task", "get_report_task", @@ -11835,6 +12535,9 @@ def test_alpha_analytics_data_client_transport_session_collision(transport_name) session1 = client1.transport.list_recurring_audience_lists._session session2 = client2.transport.list_recurring_audience_lists._session assert session1 != session2 + session1 = client1.transport.get_property_quotas_snapshot._session + session2 = client2.transport.get_property_quotas_snapshot._session + assert session1 != session2 session1 = client1.transport.create_report_task._session session2 = client2.transport.create_report_task._session assert session1 != session2 @@ -12032,9 +12735,29 @@ def test_parse_audience_list_path(): assert expected == actual -def test_recurring_audience_list_path(): +def test_property_quotas_snapshot_path(): property = "oyster" - recurring_audience_list = "nudibranch" + expected = "properties/{property}/propertyQuotasSnapshot".format( + property=property, + ) + actual = AlphaAnalyticsDataClient.property_quotas_snapshot_path(property) + assert expected == actual + + +def test_parse_property_quotas_snapshot_path(): + expected = { + "property": "nudibranch", + } + path = AlphaAnalyticsDataClient.property_quotas_snapshot_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AlphaAnalyticsDataClient.parse_property_quotas_snapshot_path(path) + assert expected == actual + + +def test_recurring_audience_list_path(): + property = "cuttlefish" + recurring_audience_list = "mussel" expected = ( "properties/{property}/recurringAudienceLists/{recurring_audience_list}".format( property=property, @@ -12049,8 +12772,8 @@ def test_recurring_audience_list_path(): def test_parse_recurring_audience_list_path(): expected = { - "property": "cuttlefish", - "recurring_audience_list": "mussel", + "property": "winkle", + "recurring_audience_list": "nautilus", } path = AlphaAnalyticsDataClient.recurring_audience_list_path(**expected) @@ -12060,8 +12783,8 @@ def test_parse_recurring_audience_list_path(): def test_report_task_path(): - property = "winkle" - report_task = "nautilus" + property = "scallop" + report_task = "abalone" expected = "properties/{property}/reportTasks/{report_task}".format( property=property, report_task=report_task, @@ -12072,8 +12795,8 @@ def test_report_task_path(): def test_parse_report_task_path(): expected = { - "property": "scallop", - "report_task": "abalone", + "property": "squid", + "report_task": "clam", } path = AlphaAnalyticsDataClient.report_task_path(**expected) @@ -12083,7 +12806,7 @@ def test_parse_report_task_path(): def test_common_billing_account_path(): - billing_account = "squid" + billing_account = "whelk" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -12093,7 +12816,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "clam", + "billing_account": "octopus", } path = AlphaAnalyticsDataClient.common_billing_account_path(**expected) @@ -12103,7 +12826,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "whelk" + folder = "oyster" expected = "folders/{folder}".format( folder=folder, ) @@ -12113,7 +12836,7 @@ def test_common_folder_path(): def 
test_parse_common_folder_path(): expected = { - "folder": "octopus", + "folder": "nudibranch", } path = AlphaAnalyticsDataClient.common_folder_path(**expected) @@ -12123,7 +12846,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "oyster" + organization = "cuttlefish" expected = "organizations/{organization}".format( organization=organization, ) @@ -12133,7 +12856,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "nudibranch", + "organization": "mussel", } path = AlphaAnalyticsDataClient.common_organization_path(**expected) @@ -12143,7 +12866,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "cuttlefish" + project = "winkle" expected = "projects/{project}".format( project=project, ) @@ -12153,7 +12876,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "mussel", + "project": "nautilus", } path = AlphaAnalyticsDataClient.common_project_path(**expected) @@ -12163,8 +12886,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "winkle" - location = "nautilus" + project = "scallop" + location = "abalone" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -12175,8 +12898,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "scallop", - "location": "abalone", + "project": "squid", + "location": "clam", } path = AlphaAnalyticsDataClient.common_location_path(**expected) diff --git a/packages/google-apps-chat/CHANGELOG.md b/packages/google-apps-chat/CHANGELOG.md index ec2485ff2775..4e560ca6d23f 100644 --- a/packages/google-apps-chat/CHANGELOG.md +++ b/packages/google-apps-chat/CHANGELOG.md @@ -1,5 +1,39 @@ # Changelog +## [0.1.12](https://github.com/googleapis/google-cloud-python/compare/google-apps-chat-v0.1.11...google-apps-chat-v0.1.12) 
(2024-10-08) + + +### Features + +* Add doc for import mode external users support ([3881914](https://github.com/googleapis/google-cloud-python/commit/3881914b43b47bf2ee187f62447ef9eccc851749)) +* Add doc for permission settings & announcement space support ([3881914](https://github.com/googleapis/google-cloud-python/commit/3881914b43b47bf2ee187f62447ef9eccc851749)) + + +### Documentation + +* Discoverable space docs improvement ([3881914](https://github.com/googleapis/google-cloud-python/commit/3881914b43b47bf2ee187f62447ef9eccc851749)) +* Memberships API dev docs improvement ([3881914](https://github.com/googleapis/google-cloud-python/commit/3881914b43b47bf2ee187f62447ef9eccc851749)) +* Messages API dev docs improvement ([3881914](https://github.com/googleapis/google-cloud-python/commit/3881914b43b47bf2ee187f62447ef9eccc851749)) + +## [0.1.11](https://github.com/googleapis/google-cloud-python/compare/google-apps-chat-v0.1.10...google-apps-chat-v0.1.11) (2024-09-16) + + +### Features + +* If you're a domain administrator or a delegated administrator, you can now include the `useAdminAccess` parameter when you call the Chat API with your administrator privileges with the following methods to manage Chat spaces and memberships in your Workspace organization: ([a20b1e5](https://github.com/googleapis/google-cloud-python/commit/a20b1e508068845c36b1701836ba17a699cb10ac)) + + +### Documentation + +* A comment for field `filter` in message `.google.chat.v1.ListMembershipsRequest` is updated to support `!=` operator ([a20b1e5](https://github.com/googleapis/google-cloud-python/commit/a20b1e508068845c36b1701836ba17a699cb10ac)) + +## [0.1.10](https://github.com/googleapis/google-cloud-python/compare/google-apps-chat-v0.1.9...google-apps-chat-v0.1.10) (2024-09-05) + + +### Features + +* [google-apps-chat] Add CHAT_SPACE link type support for GA launch ([#13064](https://github.com/googleapis/google-cloud-python/issues/13064)) 
([0ee300a](https://github.com/googleapis/google-cloud-python/commit/0ee300a0497968aa2c85969924b37f95f67675f0)) + ## [0.1.9](https://github.com/googleapis/google-cloud-python/compare/google-apps-chat-v0.1.8...google-apps-chat-v0.1.9) (2024-07-30) diff --git a/packages/google-apps-chat/chat-v1-py.tar.gz b/packages/google-apps-chat/chat-v1-py.tar.gz index e69de29bb2d1..496d03ec6ccf 100644 Binary files a/packages/google-apps-chat/chat-v1-py.tar.gz and b/packages/google-apps-chat/chat-v1-py.tar.gz differ diff --git a/packages/google-apps-chat/google/apps/chat/__init__.py b/packages/google-apps-chat/google/apps/chat/__init__.py index b50c0acffe6f..17679ffaa12a 100644 --- a/packages/google-apps-chat/google/apps/chat/__init__.py +++ b/packages/google-apps-chat/google/apps/chat/__init__.py @@ -26,6 +26,7 @@ from google.apps.chat_v1.types.annotation import ( Annotation, AnnotationType, + ChatSpaceLinkData, DriveLinkData, RichLinkMetadata, SlashCommandMetadata, @@ -110,6 +111,8 @@ GetSpaceRequest, ListSpacesRequest, ListSpacesResponse, + SearchSpacesRequest, + SearchSpacesResponse, Space, UpdateSpaceRequest, ) @@ -137,6 +140,7 @@ "ChatServiceAsyncClient", "ActionStatus", "Annotation", + "ChatSpaceLinkData", "DriveLinkData", "RichLinkMetadata", "SlashCommandMetadata", @@ -210,6 +214,8 @@ "GetSpaceRequest", "ListSpacesRequest", "ListSpacesResponse", + "SearchSpacesRequest", + "SearchSpacesResponse", "Space", "UpdateSpaceRequest", "GetSpaceEventRequest", diff --git a/packages/google-apps-chat/google/apps/chat/gapic_version.py b/packages/google-apps-chat/google/apps/chat/gapic_version.py index 558c8aab67c5..17bbab4c1877 100644 --- a/packages/google-apps-chat/google/apps/chat/gapic_version.py +++ b/packages/google-apps-chat/google/apps/chat/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.12" # {x-release-please-version} diff --git a/packages/google-apps-chat/google/apps/chat_v1/__init__.py b/packages/google-apps-chat/google/apps/chat_v1/__init__.py index 94c3b6f7fef3..d770a6fbb7ff 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/__init__.py +++ b/packages/google-apps-chat/google/apps/chat_v1/__init__.py @@ -23,6 +23,7 @@ from .types.annotation import ( Annotation, AnnotationType, + ChatSpaceLinkData, DriveLinkData, RichLinkMetadata, SlashCommandMetadata, @@ -107,6 +108,8 @@ GetSpaceRequest, ListSpacesRequest, ListSpacesResponse, + SearchSpacesRequest, + SearchSpacesResponse, Space, UpdateSpaceRequest, ) @@ -138,6 +141,7 @@ "AttachmentDataRef", "CardWithId", "ChatServiceClient", + "ChatSpaceLinkData", "CompleteImportSpaceRequest", "CompleteImportSpaceResponse", "ContextualAddOnMarkup", @@ -199,6 +203,8 @@ "ReactionCreatedEventData", "ReactionDeletedEventData", "RichLinkMetadata", + "SearchSpacesRequest", + "SearchSpacesResponse", "SetUpSpaceRequest", "SlashCommand", "SlashCommandMetadata", diff --git a/packages/google-apps-chat/google/apps/chat_v1/gapic_metadata.json b/packages/google-apps-chat/google/apps/chat_v1/gapic_metadata.json index 3cdf90b4e07e..325f347acf03 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/gapic_metadata.json +++ b/packages/google-apps-chat/google/apps/chat_v1/gapic_metadata.json @@ -120,6 +120,11 @@ "list_spaces" ] }, + "SearchSpaces": { + "methods": [ + "search_spaces" + ] + }, "SetUpSpace": { "methods": [ "set_up_space" @@ -265,6 +270,11 @@ "list_spaces" ] }, + "SearchSpaces": { + "methods": [ + "search_spaces" + ] + }, "SetUpSpace": { "methods": [ "set_up_space" @@ -410,6 +420,11 @@ "list_spaces" ] }, + "SearchSpaces": { + "methods": [ + "search_spaces" + ] + }, "SetUpSpace": { "methods": [ "set_up_space" diff --git a/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py 
b/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py index 558c8aab67c5..17bbab4c1877 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py +++ b/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.12" # {x-release-please-version} diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/async_client.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/async_client.py index 945434859bde..8e88d84deba6 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/async_client.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/async_client.py @@ -314,19 +314,36 @@ async def create_message( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> gc_message.Message: - r"""Creates a message in a Google Chat space. The maximum message - size, including text and cards, is 32,000 bytes. For an example, - see `Send a + r"""Creates a message in a Google Chat space. For an example, see + `Send a message `__. - Calling this method requires - `authentication `__ - and supports the following authentication types: + The ``create()`` method requires either user or app + authentication. Chat attributes the message sender differently + depending on the type of authentication that you use in your + request. - - For text messages, user authentication or app authentication - are supported. - - For card messages, only app authentication is supported. - (Only Chat apps can create card messages.) + The following image shows how Chat attributes a message when you + use app authentication. Chat displays the Chat app as the + message sender. 
The content of the message can contain text + (``text``), cards (``cardsV2``), and accessory widgets + (``accessoryWidgets``). + + |Message sent with app authentication async client| + + The following image shows how Chat attributes a message when you + use user authentication. Chat displays the user as the message + sender and attributes the Chat app to the message by displaying + its name. The content of message can only contain text + (``text``). + + |Message sent with user authentication async client| + + The maximum message size, including the message contents, is + 32,000 bytes. + + .. |Message sent with app authentication async client| image:: https://developers.google.com/workspace/chat/images/message-app-auth.svg + .. |Message sent with user authentication async client| image:: https://developers.google.com/workspace/chat/images/message-user-auth.svg .. code-block:: python @@ -464,9 +481,12 @@ async def list_messages( metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListMessagesAsyncPager: r"""Lists messages in a space that the caller is a member of, - including messages from blocked members and spaces. For an - example, see `List - messages `__. Requires `user + including messages from blocked members and spaces. If you list + messages from a space with no messages, the response is an empty + object. When using a REST/HTTP interface, the response contains + an empty JSON object, ``{}``. For an example, see `List + messages `__. + Requires `user authentication `__. .. code-block:: python @@ -1535,6 +1555,103 @@ async def sample_list_spaces(): # Done; return the response. 
return response + async def search_spaces( + self, + request: Optional[Union[space.SearchSpacesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.SearchSpacesAsyncPager: + r"""Returns a list of spaces in a Google Workspace organization + based on an administrator's search. Requires `user + authentication with administrator + privileges `__. + In the request, set ``use_admin_access`` to ``true``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import chat_v1 + + async def sample_search_spaces(): + # Create a client + client = chat_v1.ChatServiceAsyncClient() + + # Initialize request argument(s) + request = chat_v1.SearchSpacesRequest( + query="query_value", + ) + + # Make the request + page_result = client.search_spaces(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.apps.chat_v1.types.SearchSpacesRequest, dict]]): + The request object. Request to search for a list of + spaces based on a query. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.apps.chat_v1.services.chat_service.pagers.SearchSpacesAsyncPager: + Response with a list of spaces + corresponding to the search spaces + request. 
Iterating over this object + will yield results and resolve + additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, space.SearchSpacesRequest): + request = space.SearchSpacesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.search_spaces + ] + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.SearchSpacesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def get_space( self, request: Optional[Union[space.GetSpaceRequest, dict]] = None, @@ -1662,8 +1779,9 @@ async def create_space( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> gc_space.Space: - r"""Creates a named space. Spaces grouped by topics aren't - supported. For an example, see `Create a + r"""Creates a space with no members. Can be used to create a named + space. Spaces grouped by topics aren't supported. For an + example, see `Create a space `__. If you receive the error message ``ALREADY_EXISTS`` when @@ -1671,6 +1789,11 @@ async def create_space( space within the Google Workspace organization might already use this display name. + If you're a member of the `Developer Preview + program `__, + you can create a group chat in import mode using + ``spaceType.GROUP_CHAT``. + Requires `user authentication `__. 
@@ -1690,7 +1813,11 @@ async def sample_create_space(): client = chat_v1.ChatServiceAsyncClient() # Initialize request argument(s) + space = chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.CreateSpaceRequest( + space=space, ) # Make the request @@ -1701,16 +1828,22 @@ async def sample_create_space(): Args: request (Optional[Union[google.apps.chat_v1.types.CreateSpaceRequest, dict]]): - The request object. A request to create a named space. + The request object. A request to create a named space + with no members. space (:class:`google.apps.chat_v1.types.Space`): Required. The ``displayName`` and ``spaceType`` fields must be populated. Only ``SpaceType.SPACE`` is supported. - If you receive the error message ``ALREADY_EXISTS`` when - creating a space, try a different ``displayName``. An - existing space within the Google Workspace organization - might already use this display name. + If you receive the error message ``ALREADY_EXISTS``, try + a different ``displayName``. An existing space within + the Google Workspace organization might already use this + display name. + + If you're a member of the `Developer Preview + program `__, + ``SpaceType.GROUP_CHAT`` can be used if ``importMode`` + is set to true. The space ``name`` is assigned on the server so anything specified in this field will be ignored. 
@@ -1855,7 +1988,11 @@ async def sample_set_up_space(): client = chat_v1.ChatServiceAsyncClient() # Initialize request argument(s) + space = chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.SetUpSpaceRequest( + space=space, ) # Make the request @@ -1945,7 +2082,11 @@ async def sample_update_space(): client = chat_v1.ChatServiceAsyncClient() # Initialize request argument(s) + space = chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.UpdateSpaceRequest( + space=space, ) # Make the request @@ -1970,68 +2111,73 @@ async def sample_update_space(): Required. The updated field paths, comma separated if there are multiple. - Currently supported field paths: - - - ``display_name`` (Only supports changing the display - name of a space with the ``SPACE`` type, or when also - including the ``space_type`` mask to change a - ``GROUP_CHAT`` space type to ``SPACE``. Trying to - update the display name of a ``GROUP_CHAT`` or a - ``DIRECT_MESSAGE`` space results in an invalid - argument error. If you receive the error message - ``ALREADY_EXISTS`` when updating the ``displayName``, - try a different ``displayName``. An existing space - within the Google Workspace organization might - already use this display name.) - - - ``space_type`` (Only supports changing a - ``GROUP_CHAT`` space type to ``SPACE``. Include - ``display_name`` together with ``space_type`` in the - update mask and ensure that the specified space has a - non-empty display name and the ``SPACE`` space type. - Including the ``space_type`` mask and the ``SPACE`` - type in the specified space when updating the display - name is optional if the existing space already has - the ``SPACE`` type. Trying to update the space type - in other ways results in an invalid argument error). - ``space_type`` is not supported with admin access. 
- - - ``space_details`` - - - ``space_history_state`` (Supports `turning history on - or off for the - space `__ - if `the organization allows users to change their - history - setting `__. - Warning: mutually exclusive with all other field - paths.) ``space_history_state`` is not supported with - admin access. - - - ``access_settings.audience`` (Supports changing the - `access - setting `__ - of who can discover the space, join the space, and - preview the messages in space. If no audience is - specified in the access setting, the space's access - setting is updated to private. Warning: mutually - exclusive with all other field paths.) - ``access_settings.audience`` is not supported with - admin access. - - - Developer Preview: Supports changing the `permission - settings `__ - of a space, supported field paths include: - ``permission_settings.manage_members_and_groups``, - ``permission_settings.modify_space_details``, - ``permission_settings.toggle_history``, - ``permission_settings.use_at_mention_all``, - ``permission_settings.manage_apps``, - ``permission_settings.manage_webhooks``, - ``permission_settings.reply_messages`` (Warning: - mutually exclusive with all other non-permission - settings field paths). ``permission_settings`` is not - supported with admin access. + You can update the following fields for a space: + + ``space_details``: Updates the space's description. + Supports up to 150 characters. + + ``display_name``: Only supports updating the display + name for spaces where ``spaceType`` field is ``SPACE``. + If you receive the error message ``ALREADY_EXISTS``, try + a different value. An existing space within the Google + Workspace organization might already use this display + name. + + ``space_type``: Only supports changing a ``GROUP_CHAT`` + space type to ``SPACE``. Include ``display_name`` + together with ``space_type`` in the update mask and + ensure that the specified space has a non-empty display + name and the ``SPACE`` space type. 
Including the + ``space_type`` mask and the ``SPACE`` type in the + specified space when updating the display name is + optional if the existing space already has the ``SPACE`` + type. Trying to update the space type in other ways + results in an invalid argument error. ``space_type`` is + not supported with ``useAdminAccess``. + + ``space_history_state``: Updates `space history + settings `__ + by turning history on or off for the space. Only + supported if history settings are enabled for the Google + Workspace organization. To update the space history + state, you must omit all other field masks in your + request. ``space_history_state`` is not supported with + ``useAdminAccess``. + + ``access_settings.audience``: Updates the `access + setting `__ + of who can discover the space, join the space, and + preview the messages in named space where ``spaceType`` + field is ``SPACE``. If the existing space has a target + audience, you can remove the audience and restrict space + access by omitting a value for this field mask. To + update access settings for a space, the authenticating + user must be a space manager and omit all other field + masks in your request. You can't update this field if + the space is in `import + mode `__. + To learn more, see `Make a space discoverable to + specific + users `__. + ``access_settings.audience`` is not supported with + ``useAdminAccess``. + + ``permission_settings``: Supports changing the + `permission + settings `__ + of a space. When updating permission settings, you can + only specify ``permissionSettings`` field masks; you + cannot update other field masks at the same time. + ``permissionSettings`` is not supported with + ``useAdminAccess``. 
The supported field masks include: + + - ``permission_settings.manageMembersAndGroups`` + - ``permission_settings.modifySpaceDetails`` + - ``permission_settings.toggleHistory`` + - ``permission_settings.useAtMentionAll`` + - ``permission_settings.manageApps`` + - ``permission_settings.manageWebhooks`` + - ``permission_settings.replyMessages`` This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -2398,46 +2544,25 @@ async def create_membership( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> gc_membership.Membership: - r"""Creates a human membership or app membership for the calling - app. Creating memberships for other apps isn't supported. For an - example, see `Invite or add a user or a Google Chat app to a - space `__. - When creating a membership, if the specified member has their - auto-accept policy turned off, then they're invited, and must - accept the space invitation before joining. Otherwise, creating - a membership adds the member directly to the specified space. - Requires `user + r"""Creates a membership for the calling Chat app, a user, or a + Google Group. Creating memberships for other Chat apps isn't + supported. When creating a membership, if the specified member + has their auto-accept policy turned off, then they're invited, + and must accept the space invitation before joining. Otherwise, + creating a membership adds the member directly to the specified + space. Requires `user authentication `__. - To specify the member to add, set the ``membership.member.name`` - for the human or app member, or set the - ``membership.group_member.name`` for the group member. - - - To add the calling app to a space or a direct message between - two human users, use ``users/app``. Unable to add other apps - to the space. - - - To add a human user, use ``users/{user}``, where ``{user}`` - can be the email address for the user. 
For users in the same - Workspace organization ``{user}`` can also be the ``id`` for - the person from the People API, or the ``id`` for the user in - the Directory API. For example, if the People API Person - profile ID for ``user@example.com`` is ``123456789``, you can - add the user to the space by setting the - ``membership.member.name`` to ``users/user@example.com`` or - ``users/123456789``. - - - To add or invite a Google group in a named space, use - ``groups/{group}``, where ``{group}`` is the ``id`` for the - group from the Cloud Identity Groups API. For example, you - can use `Cloud Identity Groups lookup - API `__ - to retrieve the ID ``123456789`` for group email - ``group@example.com``, then you can add or invite the group - to a named space by setting the - ``membership.group_member.name`` to ``groups/123456789``. - Group email is not supported, and Google groups can only be - added as members in named spaces. + For example usage, see: + + - `Invite or add a user to a + space `__. + + - `Invite or add a Google Group to a + space `__. + + - `Add the Chat app to a + space `__. .. code-block:: python @@ -3597,6 +3722,9 @@ async def get_space_event( message was later updated, the server returns the updated ``Message`` resource in the event payload. + Note: The ``permissionSettings`` field is not returned in the + Space object of the Space event data for this request. + Requires `user authentication `__. 
To get an event, the authenticated user must be a member of the diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py index 268ff696d3a6..aff7158000f3 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py @@ -878,19 +878,36 @@ def create_message( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> gc_message.Message: - r"""Creates a message in a Google Chat space. The maximum message - size, including text and cards, is 32,000 bytes. For an example, - see `Send a + r"""Creates a message in a Google Chat space. For an example, see + `Send a message `__. - Calling this method requires - `authentication `__ - and supports the following authentication types: + The ``create()`` method requires either user or app + authentication. Chat attributes the message sender differently + depending on the type of authentication that you use in your + request. - - For text messages, user authentication or app authentication - are supported. - - For card messages, only app authentication is supported. - (Only Chat apps can create card messages.) + The following image shows how Chat attributes a message when you + use app authentication. Chat displays the Chat app as the + message sender. The content of the message can contain text + (``text``), cards (``cardsV2``), and accessory widgets + (``accessoryWidgets``). + + |Message sent with app authentication client| + + The following image shows how Chat attributes a message when you + use user authentication. Chat displays the user as the message + sender and attributes the Chat app to the message by displaying + its name. The content of message can only contain text + (``text``). 
+ + |Message sent with user authentication client| + + The maximum message size, including the message contents, is + 32,000 bytes. + + .. |Message sent with app authentication client| image:: https://developers.google.com/workspace/chat/images/message-app-auth.svg + .. |Message sent with user authentication client| image:: https://developers.google.com/workspace/chat/images/message-user-auth.svg .. code-block:: python @@ -1025,9 +1042,12 @@ def list_messages( metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListMessagesPager: r"""Lists messages in a space that the caller is a member of, - including messages from blocked members and spaces. For an - example, see `List - messages `__. Requires `user + including messages from blocked members and spaces. If you list + messages from a space with no messages, the response is an empty + object. When using a REST/HTTP interface, the response contains + an empty JSON object, ``{}``. For an example, see `List + messages `__. + Requires `user authentication `__. .. code-block:: python @@ -2071,6 +2091,101 @@ def sample_list_spaces(): # Done; return the response. return response + def search_spaces( + self, + request: Optional[Union[space.SearchSpacesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.SearchSpacesPager: + r"""Returns a list of spaces in a Google Workspace organization + based on an administrator's search. Requires `user + authentication with administrator + privileges `__. + In the request, set ``use_admin_access`` to ``true``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import chat_v1 + + def sample_search_spaces(): + # Create a client + client = chat_v1.ChatServiceClient() + + # Initialize request argument(s) + request = chat_v1.SearchSpacesRequest( + query="query_value", + ) + + # Make the request + page_result = client.search_spaces(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.apps.chat_v1.types.SearchSpacesRequest, dict]): + The request object. Request to search for a list of + spaces based on a query. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.apps.chat_v1.services.chat_service.pagers.SearchSpacesPager: + Response with a list of spaces + corresponding to the search spaces + request. Iterating over this object + will yield results and resolve + additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, space.SearchSpacesRequest): + request = space.SearchSpacesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.search_spaces] + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.SearchSpacesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def get_space( self, request: Optional[Union[space.GetSpaceRequest, dict]] = None, @@ -2195,8 +2310,9 @@ def create_space( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> gc_space.Space: - r"""Creates a named space. Spaces grouped by topics aren't - supported. For an example, see `Create a + r"""Creates a space with no members. Can be used to create a named + space. Spaces grouped by topics aren't supported. For an + example, see `Create a space `__. If you receive the error message ``ALREADY_EXISTS`` when @@ -2204,6 +2320,11 @@ def create_space( space within the Google Workspace organization might already use this display name. + If you're a member of the `Developer Preview + program `__, + you can create a group chat in import mode using + ``spaceType.GROUP_CHAT``. + Requires `user authentication `__. @@ -2223,7 +2344,11 @@ def sample_create_space(): client = chat_v1.ChatServiceClient() # Initialize request argument(s) + space = chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.CreateSpaceRequest( + space=space, ) # Make the request @@ -2234,16 +2359,22 @@ def sample_create_space(): Args: request (Union[google.apps.chat_v1.types.CreateSpaceRequest, dict]): - The request object. A request to create a named space. + The request object. A request to create a named space + with no members. space (google.apps.chat_v1.types.Space): Required. The ``displayName`` and ``spaceType`` fields must be populated. Only ``SpaceType.SPACE`` is supported. - If you receive the error message ``ALREADY_EXISTS`` when - creating a space, try a different ``displayName``. An - existing space within the Google Workspace organization - might already use this display name. 
+ If you receive the error message ``ALREADY_EXISTS``, try + a different ``displayName``. An existing space within + the Google Workspace organization might already use this + display name. + + If you're a member of the `Developer Preview + program `__, + ``SpaceType.GROUP_CHAT`` can be used if ``importMode`` + is set to true. The space ``name`` is assigned on the server so anything specified in this field will be ignored. @@ -2385,7 +2516,11 @@ def sample_set_up_space(): client = chat_v1.ChatServiceClient() # Initialize request argument(s) + space = chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.SetUpSpaceRequest( + space=space, ) # Make the request @@ -2473,7 +2608,11 @@ def sample_update_space(): client = chat_v1.ChatServiceClient() # Initialize request argument(s) + space = chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.UpdateSpaceRequest( + space=space, ) # Make the request @@ -2498,68 +2637,73 @@ def sample_update_space(): Required. The updated field paths, comma separated if there are multiple. - Currently supported field paths: - - - ``display_name`` (Only supports changing the display - name of a space with the ``SPACE`` type, or when also - including the ``space_type`` mask to change a - ``GROUP_CHAT`` space type to ``SPACE``. Trying to - update the display name of a ``GROUP_CHAT`` or a - ``DIRECT_MESSAGE`` space results in an invalid - argument error. If you receive the error message - ``ALREADY_EXISTS`` when updating the ``displayName``, - try a different ``displayName``. An existing space - within the Google Workspace organization might - already use this display name.) - - - ``space_type`` (Only supports changing a - ``GROUP_CHAT`` space type to ``SPACE``. Include - ``display_name`` together with ``space_type`` in the - update mask and ensure that the specified space has a - non-empty display name and the ``SPACE`` space type. 
- Including the ``space_type`` mask and the ``SPACE`` - type in the specified space when updating the display - name is optional if the existing space already has - the ``SPACE`` type. Trying to update the space type - in other ways results in an invalid argument error). - ``space_type`` is not supported with admin access. - - - ``space_details`` - - - ``space_history_state`` (Supports `turning history on - or off for the - space `__ - if `the organization allows users to change their - history - setting `__. - Warning: mutually exclusive with all other field - paths.) ``space_history_state`` is not supported with - admin access. - - - ``access_settings.audience`` (Supports changing the - `access - setting `__ - of who can discover the space, join the space, and - preview the messages in space. If no audience is - specified in the access setting, the space's access - setting is updated to private. Warning: mutually - exclusive with all other field paths.) - ``access_settings.audience`` is not supported with - admin access. - - - Developer Preview: Supports changing the `permission - settings `__ - of a space, supported field paths include: - ``permission_settings.manage_members_and_groups``, - ``permission_settings.modify_space_details``, - ``permission_settings.toggle_history``, - ``permission_settings.use_at_mention_all``, - ``permission_settings.manage_apps``, - ``permission_settings.manage_webhooks``, - ``permission_settings.reply_messages`` (Warning: - mutually exclusive with all other non-permission - settings field paths). ``permission_settings`` is not - supported with admin access. + You can update the following fields for a space: + + ``space_details``: Updates the space's description. + Supports up to 150 characters. + + ``display_name``: Only supports updating the display + name for spaces where ``spaceType`` field is ``SPACE``. + If you receive the error message ``ALREADY_EXISTS``, try + a different value. 
An existing space within the Google + Workspace organization might already use this display + name. + + ``space_type``: Only supports changing a ``GROUP_CHAT`` + space type to ``SPACE``. Include ``display_name`` + together with ``space_type`` in the update mask and + ensure that the specified space has a non-empty display + name and the ``SPACE`` space type. Including the + ``space_type`` mask and the ``SPACE`` type in the + specified space when updating the display name is + optional if the existing space already has the ``SPACE`` + type. Trying to update the space type in other ways + results in an invalid argument error. ``space_type`` is + not supported with ``useAdminAccess``. + + ``space_history_state``: Updates `space history + settings `__ + by turning history on or off for the space. Only + supported if history settings are enabled for the Google + Workspace organization. To update the space history + state, you must omit all other field masks in your + request. ``space_history_state`` is not supported with + ``useAdminAccess``. + + ``access_settings.audience``: Updates the `access + setting `__ + of who can discover the space, join the space, and + preview the messages in named space where ``spaceType`` + field is ``SPACE``. If the existing space has a target + audience, you can remove the audience and restrict space + access by omitting a value for this field mask. To + update access settings for a space, the authenticating + user must be a space manager and omit all other field + masks in your request. You can't update this field if + the space is in `import + mode `__. + To learn more, see `Make a space discoverable to + specific + users `__. + ``access_settings.audience`` is not supported with + ``useAdminAccess``. + + ``permission_settings``: Supports changing the + `permission + settings `__ + of a space. 
When updating permission settings, you can + only specify ``permissionSettings`` field masks; you + cannot update other field masks at the same time. + ``permissionSettings`` is not supported with + ``useAdminAccess``. The supported field masks include: + + - ``permission_settings.manageMembersAndGroups`` + - ``permission_settings.modifySpaceDetails`` + - ``permission_settings.toggleHistory`` + - ``permission_settings.useAtMentionAll`` + - ``permission_settings.manageApps`` + - ``permission_settings.manageWebhooks`` + - ``permission_settings.replyMessages`` This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -2916,46 +3060,25 @@ def create_membership( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> gc_membership.Membership: - r"""Creates a human membership or app membership for the calling - app. Creating memberships for other apps isn't supported. For an - example, see `Invite or add a user or a Google Chat app to a - space `__. - When creating a membership, if the specified member has their - auto-accept policy turned off, then they're invited, and must - accept the space invitation before joining. Otherwise, creating - a membership adds the member directly to the specified space. - Requires `user + r"""Creates a membership for the calling Chat app, a user, or a + Google Group. Creating memberships for other Chat apps isn't + supported. When creating a membership, if the specified member + has their auto-accept policy turned off, then they're invited, + and must accept the space invitation before joining. Otherwise, + creating a membership adds the member directly to the specified + space. Requires `user authentication `__. - To specify the member to add, set the ``membership.member.name`` - for the human or app member, or set the - ``membership.group_member.name`` for the group member. 
- - - To add the calling app to a space or a direct message between - two human users, use ``users/app``. Unable to add other apps - to the space. - - - To add a human user, use ``users/{user}``, where ``{user}`` - can be the email address for the user. For users in the same - Workspace organization ``{user}`` can also be the ``id`` for - the person from the People API, or the ``id`` for the user in - the Directory API. For example, if the People API Person - profile ID for ``user@example.com`` is ``123456789``, you can - add the user to the space by setting the - ``membership.member.name`` to ``users/user@example.com`` or - ``users/123456789``. - - - To add or invite a Google group in a named space, use - ``groups/{group}``, where ``{group}`` is the ``id`` for the - group from the Cloud Identity Groups API. For example, you - can use `Cloud Identity Groups lookup - API `__ - to retrieve the ID ``123456789`` for group email - ``group@example.com``, then you can add or invite the group - to a named space by setting the - ``membership.group_member.name`` to ``groups/123456789``. - Group email is not supported, and Google groups can only be - added as members in named spaces. + For example usage, see: + + - `Invite or add a user to a + space `__. + + - `Invite or add a Google Group to a + space `__. + + - `Add the Chat app to a + space `__. .. code-block:: python @@ -4088,6 +4211,9 @@ def get_space_event( message was later updated, the server returns the updated ``Message`` resource in the event payload. + Note: The ``permissionSettings`` field is not returned in the + Space object of the Space event data for this request. + Requires `user authentication `__. 
To get an event, the authenticated user must be a member of the diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/pagers.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/pagers.py index d7565e952ff8..94763fc39240 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/pagers.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/pagers.py @@ -497,6 +497,158 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) +class SearchSpacesPager: + """A pager for iterating through ``search_spaces`` requests. + + This class thinly wraps an initial + :class:`google.apps.chat_v1.types.SearchSpacesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``spaces`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``SearchSpaces`` requests and continue to iterate + through the ``spaces`` field on the + corresponding responses. + + All the usual :class:`google.apps.chat_v1.types.SearchSpacesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., space.SearchSpacesResponse], + request: space.SearchSpacesRequest, + response: space.SearchSpacesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.apps.chat_v1.types.SearchSpacesRequest): + The initial request object. + response (google.apps.chat_v1.types.SearchSpacesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = space.SearchSpacesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[space.SearchSpacesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[space.Space]: + for page in self.pages: + yield from page.spaces + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class SearchSpacesAsyncPager: + """A pager for iterating through ``search_spaces`` requests. + + This class thinly wraps an initial + :class:`google.apps.chat_v1.types.SearchSpacesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``spaces`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``SearchSpaces`` requests and continue to iterate + through the ``spaces`` field on the + corresponding responses. + + All the usual :class:`google.apps.chat_v1.types.SearchSpacesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., Awaitable[space.SearchSpacesResponse]], + request: space.SearchSpacesRequest, + response: space.SearchSpacesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.apps.chat_v1.types.SearchSpacesRequest): + The initial request object. + response (google.apps.chat_v1.types.SearchSpacesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = space.SearchSpacesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[space.SearchSpacesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[space.Space]: + async def async_generator(): + async for page in self.pages: + for response in page.spaces: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + class ListReactionsPager: """A pager for iterating through ``list_reactions`` requests. 
diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/base.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/base.py index e5b32aaf272b..5c83cc5f462e 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/base.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/base.py @@ -304,6 +304,20 @@ def _prep_wrapped_messages(self, client_info): default_timeout=30.0, client_info=client_info, ), + self.search_spaces: gapic_v1.method.wrap_method( + self.search_spaces, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), self.get_space: gapic_v1.method.wrap_method( self.get_space, default_retry=retries.Retry( @@ -662,6 +676,15 @@ def list_spaces( ]: raise NotImplementedError() + @property + def search_spaces( + self, + ) -> Callable[ + [space.SearchSpacesRequest], + Union[space.SearchSpacesResponse, Awaitable[space.SearchSpacesResponse]], + ]: + raise NotImplementedError() + @property def get_space( self, diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc.py index 2ef3b8c317bb..d02bc6784c2a 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc.py @@ -254,19 +254,36 @@ def create_message( ) -> Callable[[gc_message.CreateMessageRequest], gc_message.Message]: r"""Return a callable for the create message method over gRPC. - Creates a message in a Google Chat space. The maximum message - size, including text and cards, is 32,000 bytes. 
For an example, - see `Send a + Creates a message in a Google Chat space. For an example, see + `Send a message `__. - Calling this method requires - `authentication `__ - and supports the following authentication types: + The ``create()`` method requires either user or app + authentication. Chat attributes the message sender differently + depending on the type of authentication that you use in your + request. - - For text messages, user authentication or app authentication - are supported. - - For card messages, only app authentication is supported. - (Only Chat apps can create card messages.) + The following image shows how Chat attributes a message when you + use app authentication. Chat displays the Chat app as the + message sender. The content of the message can contain text + (``text``), cards (``cardsV2``), and accessory widgets + (``accessoryWidgets``). + + |Message sent with app authentication gRPC| + + The following image shows how Chat attributes a message when you + use user authentication. Chat displays the user as the message + sender and attributes the Chat app to the message by displaying + its name. The content of message can only contain text + (``text``). + + |Message sent with user authentication gRPC| + + The maximum message size, including the message contents, is + 32,000 bytes. + + .. |Message sent with app authentication gRPC| image:: https://developers.google.com/workspace/chat/images/message-app-auth.svg + .. |Message sent with user authentication gRPC| image:: https://developers.google.com/workspace/chat/images/message-user-auth.svg Returns: Callable[[~.CreateMessageRequest], @@ -293,9 +310,12 @@ def list_messages( r"""Return a callable for the list messages method over gRPC. Lists messages in a space that the caller is a member of, - including messages from blocked members and spaces. For an - example, see `List - messages `__. Requires `user + including messages from blocked members and spaces. 
If you list + messages from a space with no messages, the response is an empty + object. When using a REST/HTTP interface, the response contains + an empty JSON object, ``{}``. For an example, see `List + messages `__. + Requires `user authentication `__. Returns: @@ -619,6 +639,36 @@ def list_spaces( ) return self._stubs["list_spaces"] + @property + def search_spaces( + self, + ) -> Callable[[space.SearchSpacesRequest], space.SearchSpacesResponse]: + r"""Return a callable for the search spaces method over gRPC. + + Returns a list of spaces in a Google Workspace organization + based on an administrator's search. Requires `user + authentication with administrator + privileges `__. + In the request, set ``use_admin_access`` to ``true``. + + Returns: + Callable[[~.SearchSpacesRequest], + ~.SearchSpacesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "search_spaces" not in self._stubs: + self._stubs["search_spaces"] = self.grpc_channel.unary_unary( + "/google.chat.v1.ChatService/SearchSpaces", + request_serializer=space.SearchSpacesRequest.serialize, + response_deserializer=space.SearchSpacesResponse.deserialize, + ) + return self._stubs["search_spaces"] + @property def get_space(self) -> Callable[[space.GetSpaceRequest], space.Space]: r"""Return a callable for the get space method over gRPC. @@ -656,8 +706,9 @@ def get_space(self) -> Callable[[space.GetSpaceRequest], space.Space]: def create_space(self) -> Callable[[gc_space.CreateSpaceRequest], gc_space.Space]: r"""Return a callable for the create space method over gRPC. - Creates a named space. Spaces grouped by topics aren't - supported. For an example, see `Create a + Creates a space with no members. Can be used to create a named + space. 
Spaces grouped by topics aren't supported. For an + example, see `Create a space `__. If you receive the error message ``ALREADY_EXISTS`` when @@ -665,6 +716,11 @@ def create_space(self) -> Callable[[gc_space.CreateSpaceRequest], gc_space.Space space within the Google Workspace organization might already use this display name. + If you're a member of the `Developer Preview + program `__, + you can create a group chat in import mode using + ``spaceType.GROUP_CHAT``. + Requires `user authentication `__. @@ -914,46 +970,25 @@ def create_membership( ) -> Callable[[gc_membership.CreateMembershipRequest], gc_membership.Membership]: r"""Return a callable for the create membership method over gRPC. - Creates a human membership or app membership for the calling - app. Creating memberships for other apps isn't supported. For an - example, see `Invite or add a user or a Google Chat app to a - space `__. - When creating a membership, if the specified member has their - auto-accept policy turned off, then they're invited, and must - accept the space invitation before joining. Otherwise, creating - a membership adds the member directly to the specified space. - Requires `user + Creates a membership for the calling Chat app, a user, or a + Google Group. Creating memberships for other Chat apps isn't + supported. When creating a membership, if the specified member + has their auto-accept policy turned off, then they're invited, + and must accept the space invitation before joining. Otherwise, + creating a membership adds the member directly to the specified + space. Requires `user authentication `__. - To specify the member to add, set the ``membership.member.name`` - for the human or app member, or set the - ``membership.group_member.name`` for the group member. - - - To add the calling app to a space or a direct message between - two human users, use ``users/app``. Unable to add other apps - to the space. 
- - - To add a human user, use ``users/{user}``, where ``{user}`` - can be the email address for the user. For users in the same - Workspace organization ``{user}`` can also be the ``id`` for - the person from the People API, or the ``id`` for the user in - the Directory API. For example, if the People API Person - profile ID for ``user@example.com`` is ``123456789``, you can - add the user to the space by setting the - ``membership.member.name`` to ``users/user@example.com`` or - ``users/123456789``. - - - To add or invite a Google group in a named space, use - ``groups/{group}``, where ``{group}`` is the ``id`` for the - group from the Cloud Identity Groups API. For example, you - can use `Cloud Identity Groups lookup - API `__ - to retrieve the ID ``123456789`` for group email - ``group@example.com``, then you can add or invite the group - to a named space by setting the - ``membership.group_member.name`` to ``groups/123456789``. - Group email is not supported, and Google groups can only be - added as members in named spaces. + For example usage, see: + + - `Invite or add a user to a + space `__. + + - `Invite or add a Google Group to a + space `__. + + - `Add the Chat app to a + space `__. Returns: Callable[[~.CreateMembershipRequest], @@ -1241,6 +1276,9 @@ def get_space_event( message was later updated, the server returns the updated ``Message`` resource in the event payload. + Note: The ``permissionSettings`` field is not returned in the + Space object of the Space event data for this request. + Requires `user authentication `__. 
To get an event, the authenticated user must be a member of the diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc_asyncio.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc_asyncio.py index e36b81f08a6c..86137f66eff8 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc_asyncio.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc_asyncio.py @@ -258,19 +258,36 @@ def create_message( ) -> Callable[[gc_message.CreateMessageRequest], Awaitable[gc_message.Message]]: r"""Return a callable for the create message method over gRPC. - Creates a message in a Google Chat space. The maximum message - size, including text and cards, is 32,000 bytes. For an example, - see `Send a + Creates a message in a Google Chat space. For an example, see + `Send a message `__. - Calling this method requires - `authentication `__ - and supports the following authentication types: + The ``create()`` method requires either user or app + authentication. Chat attributes the message sender differently + depending on the type of authentication that you use in your + request. - - For text messages, user authentication or app authentication - are supported. - - For card messages, only app authentication is supported. - (Only Chat apps can create card messages.) + The following image shows how Chat attributes a message when you + use app authentication. Chat displays the Chat app as the + message sender. The content of the message can contain text + (``text``), cards (``cardsV2``), and accessory widgets + (``accessoryWidgets``). + + |Message sent with app authentication async gRPC| + + The following image shows how Chat attributes a message when you + use user authentication. Chat displays the user as the message + sender and attributes the Chat app to the message by displaying + its name. 
The content of message can only contain text + (``text``). + + |Message sent with user authentication async gRPC| + + The maximum message size, including the message contents, is + 32,000 bytes. + + .. |Message sent with app authentication async gRPC| image:: https://developers.google.com/workspace/chat/images/message-app-auth.svg + .. |Message sent with user authentication async gRPC| image:: https://developers.google.com/workspace/chat/images/message-user-auth.svg Returns: Callable[[~.CreateMessageRequest], @@ -299,9 +316,12 @@ def list_messages( r"""Return a callable for the list messages method over gRPC. Lists messages in a space that the caller is a member of, - including messages from blocked members and spaces. For an - example, see `List - messages `__. Requires `user + including messages from blocked members and spaces. If you list + messages from a space with no messages, the response is an empty + object. When using a REST/HTTP interface, the response contains + an empty JSON object, ``{}``. For an example, see `List + messages `__. + Requires `user authentication `__. Returns: @@ -629,6 +649,36 @@ def list_spaces( ) return self._stubs["list_spaces"] + @property + def search_spaces( + self, + ) -> Callable[[space.SearchSpacesRequest], Awaitable[space.SearchSpacesResponse]]: + r"""Return a callable for the search spaces method over gRPC. + + Returns a list of spaces in a Google Workspace organization + based on an administrator's search. Requires `user + authentication with administrator + privileges `__. + In the request, set ``use_admin_access`` to ``true``. + + Returns: + Callable[[~.SearchSpacesRequest], + Awaitable[~.SearchSpacesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "search_spaces" not in self._stubs: + self._stubs["search_spaces"] = self.grpc_channel.unary_unary( + "/google.chat.v1.ChatService/SearchSpaces", + request_serializer=space.SearchSpacesRequest.serialize, + response_deserializer=space.SearchSpacesResponse.deserialize, + ) + return self._stubs["search_spaces"] + @property def get_space(self) -> Callable[[space.GetSpaceRequest], Awaitable[space.Space]]: r"""Return a callable for the get space method over gRPC. @@ -668,8 +718,9 @@ def create_space( ) -> Callable[[gc_space.CreateSpaceRequest], Awaitable[gc_space.Space]]: r"""Return a callable for the create space method over gRPC. - Creates a named space. Spaces grouped by topics aren't - supported. For an example, see `Create a + Creates a space with no members. Can be used to create a named + space. Spaces grouped by topics aren't supported. For an + example, see `Create a space `__. If you receive the error message ``ALREADY_EXISTS`` when @@ -677,6 +728,11 @@ def create_space( space within the Google Workspace organization might already use this display name. + If you're a member of the `Developer Preview + program `__, + you can create a group chat in import mode using + ``spaceType.GROUP_CHAT``. + Requires `user authentication `__. @@ -934,46 +990,25 @@ def create_membership( ]: r"""Return a callable for the create membership method over gRPC. - Creates a human membership or app membership for the calling - app. Creating memberships for other apps isn't supported. For an - example, see `Invite or add a user or a Google Chat app to a - space `__. - When creating a membership, if the specified member has their - auto-accept policy turned off, then they're invited, and must - accept the space invitation before joining. Otherwise, creating - a membership adds the member directly to the specified space. - Requires `user + Creates a membership for the calling Chat app, a user, or a + Google Group. Creating memberships for other Chat apps isn't + supported. 
When creating a membership, if the specified member + has their auto-accept policy turned off, then they're invited, + and must accept the space invitation before joining. Otherwise, + creating a membership adds the member directly to the specified + space. Requires `user authentication `__. - To specify the member to add, set the ``membership.member.name`` - for the human or app member, or set the - ``membership.group_member.name`` for the group member. - - - To add the calling app to a space or a direct message between - two human users, use ``users/app``. Unable to add other apps - to the space. - - - To add a human user, use ``users/{user}``, where ``{user}`` - can be the email address for the user. For users in the same - Workspace organization ``{user}`` can also be the ``id`` for - the person from the People API, or the ``id`` for the user in - the Directory API. For example, if the People API Person - profile ID for ``user@example.com`` is ``123456789``, you can - add the user to the space by setting the - ``membership.member.name`` to ``users/user@example.com`` or - ``users/123456789``. - - - To add or invite a Google group in a named space, use - ``groups/{group}``, where ``{group}`` is the ``id`` for the - group from the Cloud Identity Groups API. For example, you - can use `Cloud Identity Groups lookup - API `__ - to retrieve the ID ``123456789`` for group email - ``group@example.com``, then you can add or invite the group - to a named space by setting the - ``membership.group_member.name`` to ``groups/123456789``. - Group email is not supported, and Google groups can only be - added as members in named spaces. + For example usage, see: + + - `Invite or add a user to a + space `__. + + - `Invite or add a Google Group to a + space `__. + + - `Add the Chat app to a + space `__. 
Returns: Callable[[~.CreateMembershipRequest], @@ -1271,6 +1306,9 @@ def get_space_event( message was later updated, the server returns the updated ``Message`` resource in the event payload. + Note: The ``permissionSettings`` field is not returned in the + Space object of the Space event data for this request. + Requires `user authentication `__. To get an event, the authenticated user must be a member of the @@ -1485,6 +1523,20 @@ def _prep_wrapped_messages(self, client_info): default_timeout=30.0, client_info=client_info, ), + self.search_spaces: gapic_v1.method_async.wrap_method( + self.search_spaces, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), self.get_space: gapic_v1.method_async.wrap_method( self.get_space, default_retry=retries.AsyncRetry( diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/rest.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/rest.py index 87947ff116c8..d9717f7c33ea 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/rest.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/rest.py @@ -241,6 +241,14 @@ def post_list_spaces(self, response): logging.log(f"Received response: {response}") return response + def pre_search_spaces(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_search_spaces(self, response): + logging.log(f"Received response: {response}") + return response + def pre_set_up_space(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -744,6 +752,27 @@ def post_list_spaces( """ return response + def pre_search_spaces( + self, request: space.SearchSpacesRequest, metadata: 
Sequence[Tuple[str, str]] + ) -> Tuple[space.SearchSpacesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for search_spaces + + Override in a subclass to manipulate the request or metadata + before they are sent to the ChatService server. + """ + return request, metadata + + def post_search_spaces( + self, response: space.SearchSpacesResponse + ) -> space.SearchSpacesResponse: + """Post-rpc interceptor for search_spaces + + Override in a subclass to manipulate the response + after it is returned by the ChatService server but before + it is returned to user code. + """ + return response + def pre_set_up_space( self, request: space_setup.SetUpSpaceRequest, @@ -1380,7 +1409,8 @@ def __call__( Args: request (~.gc_space.CreateSpaceRequest): - The request object. A request to create a named space. + The request object. A request to create a named space + with no members. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2908,6 +2938,97 @@ def __call__( resp = self._interceptor.post_list_spaces(resp) return resp + class _SearchSpaces(ChatServiceRestStub): + def __hash__(self): + return hash("SearchSpaces") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "query": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: space.SearchSpacesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> space.SearchSpacesResponse: + r"""Call the search spaces method over HTTP. + + Args: + request (~.space.SearchSpacesRequest): + The request object. Request to search for a list of + spaces based on a query. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.space.SearchSpacesResponse: + Response with a list of spaces + corresponding to the search spaces + request. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/spaces:search", + }, + ] + request, metadata = self._interceptor.pre_search_spaces(request, metadata) + pb_request = space.SearchSpacesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = space.SearchSpacesResponse() + pb_resp = space.SearchSpacesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_search_spaces(resp) + return resp + class _SetUpSpace(ChatServiceRestStub): def __hash__(self): return hash("SetUpSpace") @@ -3671,6 +3792,14 @@ def list_spaces( # In C++ this would require a dynamic_cast return self._ListSpaces(self._session, self._host, self._interceptor) # type: ignore + @property + def search_spaces( + self, + ) -> Callable[[space.SearchSpacesRequest], space.SearchSpacesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SearchSpaces(self._session, self._host, self._interceptor) # type: ignore + @property def set_up_space(self) -> Callable[[space_setup.SetUpSpaceRequest], space.Space]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
diff --git a/packages/google-apps-chat/google/apps/chat_v1/types/__init__.py b/packages/google-apps-chat/google/apps/chat_v1/types/__init__.py index e47046db19af..d510a888bd89 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/types/__init__.py +++ b/packages/google-apps-chat/google/apps/chat_v1/types/__init__.py @@ -17,6 +17,7 @@ from .annotation import ( Annotation, AnnotationType, + ChatSpaceLinkData, DriveLinkData, RichLinkMetadata, SlashCommandMetadata, @@ -101,6 +102,8 @@ GetSpaceRequest, ListSpacesRequest, ListSpacesResponse, + SearchSpacesRequest, + SearchSpacesResponse, Space, UpdateSpaceRequest, ) @@ -123,6 +126,7 @@ __all__ = ( "ActionStatus", "Annotation", + "ChatSpaceLinkData", "DriveLinkData", "RichLinkMetadata", "SlashCommandMetadata", @@ -196,6 +200,8 @@ "GetSpaceRequest", "ListSpacesRequest", "ListSpacesResponse", + "SearchSpacesRequest", + "SearchSpacesResponse", "Space", "UpdateSpaceRequest", "GetSpaceEventRequest", diff --git a/packages/google-apps-chat/google/apps/chat_v1/types/annotation.py b/packages/google-apps-chat/google/apps/chat_v1/types/annotation.py index fa2e62f43671..ee3267d68907 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/types/annotation.py +++ b/packages/google-apps-chat/google/apps/chat_v1/types/annotation.py @@ -31,6 +31,7 @@ "SlashCommandMetadata", "RichLinkMetadata", "DriveLinkData", + "ChatSpaceLinkData", }, ) @@ -247,6 +248,11 @@ class Type(proto.Enum): class RichLinkMetadata(proto.Message): r"""A rich link to a resource. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: @@ -257,6 +263,10 @@ class RichLinkMetadata(proto.Message): drive_link_data (google.apps.chat_v1.types.DriveLinkData): Data for a drive link. 
+ This field is a member of `oneof`_ ``data``. + chat_space_link_data (google.apps.chat_v1.types.ChatSpaceLinkData): + Data for a chat space link. + This field is a member of `oneof`_ ``data``. """ @@ -268,9 +278,13 @@ class RichLinkType(proto.Enum): Default value for the enum. Don't use. DRIVE_FILE (1): A Google Drive rich link type. + CHAT_SPACE (2): + A Chat space rich link type. For example, a + space smart chip. """ RICH_LINK_TYPE_UNSPECIFIED = 0 DRIVE_FILE = 1 + CHAT_SPACE = 2 uri: str = proto.Field( proto.STRING, @@ -287,6 +301,12 @@ class RichLinkType(proto.Enum): oneof="data", message="DriveLinkData", ) + chat_space_link_data: "ChatSpaceLinkData" = proto.Field( + proto.MESSAGE, + number=4, + oneof="data", + message="ChatSpaceLinkData", + ) class DriveLinkData(proto.Message): @@ -313,4 +333,36 @@ class DriveLinkData(proto.Message): ) +class ChatSpaceLinkData(proto.Message): + r"""Data for Chat space links. + + Attributes: + space (str): + The space of the linked Chat space resource. + + Format: ``spaces/{space}`` + thread (str): + The thread of the linked Chat space resource. + + Format: ``spaces/{space}/threads/{thread}`` + message (str): + The message of the linked Chat space resource. 
+ + Format: ``spaces/{space}/messages/{message}`` + """ + + space: str = proto.Field( + proto.STRING, + number=1, + ) + thread: str = proto.Field( + proto.STRING, + number=2, + ) + message: str = proto.Field( + proto.STRING, + number=3, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-apps-chat/google/apps/chat_v1/types/membership.py b/packages/google-apps-chat/google/apps/chat_v1/types/membership.py index 925e1d7b28ed..f0e00c470930 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/types/membership.py +++ b/packages/google-apps-chat/google/apps/chat_v1/types/membership.py @@ -208,6 +208,20 @@ class CreateMembershipRequest(proto.Message): relation for itself, it must use the ``chat.memberships.app`` scope, set ``user.type`` to ``BOT``, and set ``user.name`` to ``users/app``. + use_admin_access (bool): + When ``true``, the method runs using the user's Google + Workspace administrator privileges. + + The calling user must be a Google Workspace administrator + with the `manage chat and spaces conversations + privilege `__. + + Requires the ``chat.admin.memberships`` `OAuth 2.0 + scope `__. + + Creating app memberships or creating memberships for users + outside the administrator's Google Workspace organization + isn't supported using admin access. """ parent: str = proto.Field( @@ -219,6 +233,10 @@ class CreateMembershipRequest(proto.Message): number=2, message="Membership", ) + use_admin_access: bool = proto.Field( + proto.BOOL, + number=5, + ) class UpdateMembershipRequest(proto.Message): @@ -235,6 +253,16 @@ class UpdateMembershipRequest(proto.Message): Currently supported field paths: - ``role`` + use_admin_access (bool): + When ``true``, the method runs using the user's Google + Workspace administrator privileges. + + The calling user must be a Google Workspace administrator + with the `manage chat and spaces conversations + privilege `__. + + Requires the ``chat.admin.memberships`` `OAuth 2.0 + scope `__. 
""" membership: "Membership" = proto.Field( @@ -247,6 +275,10 @@ class UpdateMembershipRequest(proto.Message): number=2, message=field_mask_pb2.FieldMask, ) + use_admin_access: bool = proto.Field( + proto.BOOL, + number=3, + ) class ListMembershipsRequest(proto.Message): @@ -289,8 +321,8 @@ class ListMembershipsRequest(proto.Message): ``ROLE_MANAGER``. To filter by type, set ``member.type`` to ``HUMAN`` or - ``BOT``. Developer Preview: You can also filter for - ``member.type`` using the ``!=`` operator. + ``BOT``. You can also filter for ``member.type`` using the + ``!=`` operator. To filter by both role and type, use the ``AND`` operator. To filter by either role or type, use the ``OR`` operator. @@ -338,6 +370,20 @@ class ListMembershipsRequest(proto.Message): Currently requires `user authentication `__. + use_admin_access (bool): + When ``true``, the method runs using the user's Google + Workspace administrator privileges. + + The calling user must be a Google Workspace administrator + with the `manage chat and spaces conversations + privilege `__. + + Requires either the ``chat.admin.memberships.readonly`` or + ``chat.admin.memberships`` `OAuth 2.0 + scope `__. + + Listing app memberships in a space isn't supported when + using admin access. """ parent: str = proto.Field( @@ -364,6 +410,10 @@ class ListMembershipsRequest(proto.Message): proto.BOOL, number=7, ) + use_admin_access: bool = proto.Field( + proto.BOOL, + number=8, + ) class ListMembershipsResponse(proto.Message): @@ -414,12 +464,30 @@ class GetMembershipRequest(proto.Message): For example, ``spaces/{space}/members/example@gmail.com`` where ``example@gmail.com`` is the email of the Google Chat user. + use_admin_access (bool): + When ``true``, the method runs using the user's Google + Workspace administrator privileges. + + The calling user must be a Google Workspace administrator + with the `manage chat and spaces conversations + privilege `__. 
+ + Requires the ``chat.admin.memberships`` or + ``chat.admin.memberships.readonly`` `OAuth 2.0 + scopes `__. + + Getting app memberships in a space isn't supported when + using admin access. """ name: str = proto.Field( proto.STRING, number=1, ) + use_admin_access: bool = proto.Field( + proto.BOOL, + number=3, + ) class DeleteMembershipRequest(proto.Message): @@ -444,12 +512,29 @@ class DeleteMembershipRequest(proto.Message): Format: ``spaces/{space}/members/{member}`` or ``spaces/{space}/members/app``. + use_admin_access (bool): + When ``true``, the method runs using the user's Google + Workspace administrator privileges. + + The calling user must be a Google Workspace administrator + with the `manage chat and spaces conversations + privilege `__. + + Requires the ``chat.admin.memberships`` `OAuth 2.0 + scope `__. + + Deleting app memberships in a space isn't supported using + admin access. """ name: str = proto.Field( proto.STRING, number=1, ) + use_admin_access: bool = proto.Field( + proto.BOOL, + number=2, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-apps-chat/google/apps/chat_v1/types/message.py b/packages/google-apps-chat/google/apps/chat_v1/types/message.py index 90dda263a1ec..56d10d7b3574 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/types/message.py +++ b/packages/google-apps-chat/google/apps/chat_v1/types/message.py @@ -109,8 +109,8 @@ class Message(proto.Message): user `__, or everyone in the space. - To learn about creating text messages, see `Send a text - message `__. + To learn about creating text messages, see `Send a + message `__. formatted_text (str): Output only. Contains the message ``text`` with markups added to communicate formatting. This field might not @@ -154,8 +154,9 @@ class Message(proto.Message): user `__, the messages can't contain cards. - To learn about cards and how to create them, see `Send card - messages `__. 
+ To learn how to create a message that contains cards, see + `Send a + message `__. `Card builder `__ @@ -213,17 +214,17 @@ class Message(proto.Message): Immutable. Input for creating a message, otherwise output only. The user that can view the message. When set, the message is private and only visible to the specified user - and the Chat app. Link previews and attachments aren't - supported for private messages. + and the Chat app. To include this field in your request, you + must call the Chat API using `app + authentication `__ + and omit the following: - Only Chat apps can send private messages. If your Chat app - `authenticates as a - user `__ - to send a message, the message can't be private and must - omit this field. + - `Attachments `__ + - `Accessory + widgets `__ - For details, see `Send private messages to Google Chat - users `__. + For details, see `Send a message + privately `__. deletion_metadata (google.apps.chat_v1.types.DeletionMetadata): Output only. Information about a deleted message. A message is deleted when ``delete_time`` is set. @@ -428,7 +429,7 @@ class Thread(proto.Message): Attributes: name (str): - Output only. Resource name of the thread. + Resource name of the thread. Example: ``spaces/{space}/threads/{thread}`` thread_key (str): diff --git a/packages/google-apps-chat/google/apps/chat_v1/types/space.py b/packages/google-apps-chat/google/apps/chat_v1/types/space.py index 46f46068321a..694375fc0ebd 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/types/space.py +++ b/packages/google-apps-chat/google/apps/chat_v1/types/space.py @@ -33,6 +33,8 @@ "GetSpaceRequest", "FindDirectMessageRequest", "UpdateSpaceRequest", + "SearchSpacesRequest", + "SearchSpacesResponse", "DeleteSpaceRequest", "CompleteImportSpaceRequest", "CompleteImportSpaceResponse", @@ -44,11 +46,26 @@ class Space(proto.Message): r"""A space in Google Chat. Spaces are conversations between two or more users or 1:1 messages between a user and a Chat app. 
+ This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: name (str): Resource name of the space. Format: ``spaces/{space}`` + + Where ``{space}`` represents the system-assigned ID for the + space. You can obtain the space ID by calling the + ```spaces.list()`` `__ + method or from the space URL. For example, if the space URL + is + ``https://mail.google.com/mail/u/0/#chat/space/AAAAAAAAA``, + the space ID is ``AAAAAAAAA``. type_ (google.apps.chat_v1.types.Space.Type): Output only. Deprecated: Use ``space_type`` instead. The type of a space. @@ -64,12 +81,12 @@ class Space(proto.Message): instead. Whether messages are threaded in this space. display_name (str): The space's display name. Required when `creating a - space `__. - If you receive the error message ``ALREADY_EXISTS`` when - creating a space or updating the ``displayName``, try a - different ``displayName``. An existing space within the - Google Workspace organization might already use this display - name. + space `__ + with a ``spaceType`` of ``SPACE``. If you receive the error + message ``ALREADY_EXISTS`` when creating a space or updating + the ``displayName``, try a different ``displayName``. An + existing space within the Google Workspace organization + might already use this display name. For direct messages, this field might be empty. @@ -84,15 +101,6 @@ class Space(proto.Message): user account). By default, a space created by a consumer account permits any Google Chat user. - - The space is used to [import data to Google Chat] - (https://developers.google.com/chat/api/guides/import-data-overview) - because import mode spaces must only permit members from - the same Google Workspace organization. 
However, as part - of the `Google Workspace Developer Preview - Program `__, - import mode spaces can permit any Google Chat user so - this field can then be set for import mode spaces. - For existing spaces, this field is output only. space_threading_state (google.apps.chat_v1.types.Space.SpaceThreadingState): Output only. The threading state in the Chat @@ -119,6 +127,9 @@ class Space(proto.Message): Only populated in the output when ``spaceType`` is ``GROUP_CHAT`` or ``SPACE``. + last_active_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp of the last message in + the space. admin_installed (bool): Output only. For direct message (DM) spaces with a Chat app, whether the space was created @@ -129,6 +140,10 @@ class Space(proto.Message): To support admin install, your Chat app must feature direct messaging. + membership_count (google.apps.chat_v1.types.Space.MembershipCount): + Output only. The count of joined memberships grouped by + member type. Populated when the ``space_type`` is ``SPACE``, + ``DIRECT_MESSAGE`` or ``GROUP_CHAT``. access_settings (google.apps.chat_v1.types.Space.AccessSettings): Optional. Specifies the `access setting `__ @@ -137,6 +152,21 @@ class Space(proto.Message): space_uri (str): Output only. The URI for a user to access the space. + predefined_permission_settings (google.apps.chat_v1.types.Space.PredefinedPermissionSettings): + Optional. Input only. Predefined space permission settings, + input only when creating a space. If the field is not set, a + collaboration space is created. After you create the space, + settings are populated in the ``PermissionSettings`` field. + + This field is a member of `oneof`_ ``space_permission_settings``. + permission_settings (google.apps.chat_v1.types.Space.PermissionSettings): + Optional. Space permission settings for + existing spaces. Input for updating exact space + permission settings, where existing permission + settings are replaced. 
Output lists current + permission settings. + + This field is a member of `oneof`_ ``space_permission_settings``. """ class Type(proto.Enum): @@ -203,6 +233,27 @@ class SpaceThreadingState(proto.Enum): GROUPED_MESSAGES = 3 UNTHREADED_MESSAGES = 4 + class PredefinedPermissionSettings(proto.Enum): + r"""Predefined permission settings that you can only specify when + creating a named space. More settings might be added in the future. + For details about permission settings for named spaces, see `Learn + about spaces `__. + + Values: + PREDEFINED_PERMISSION_SETTINGS_UNSPECIFIED (0): + Unspecified. Don't use. + COLLABORATION_SPACE (1): + Setting to make the space a collaboration + space where all members can post messages. + ANNOUNCEMENT_SPACE (2): + Setting to make the space an announcement + space where only space managers can post + messages. + """ + PREDEFINED_PERMISSION_SETTINGS_UNSPECIFIED = 0 + COLLABORATION_SPACE = 1 + ANNOUNCEMENT_SPACE = 2 + class SpaceDetails(proto.Message): r"""Details about the space including description and rules. @@ -228,6 +279,29 @@ class SpaceDetails(proto.Message): number=2, ) + class MembershipCount(proto.Message): + r"""Represents the count of memberships of a space, grouped into + categories. + + Attributes: + joined_direct_human_user_count (int): + Count of human users that have directly + joined the space, not counting users joined by + having membership in a joined group. + joined_group_count (int): + Count of all groups that have directly joined + the space. + """ + + joined_direct_human_user_count: int = proto.Field( + proto.INT32, + number=4, + ) + joined_group_count: int = proto.Field( + proto.INT32, + number=5, + ) + class AccessSettings(proto.Message): r"""Represents the `access setting `__ of the @@ -241,14 +315,20 @@ class AccessSettings(proto.Message): Optional. The resource name of the `target audience `__ who can discover the space, join the space, and preview the - messages in the space. 
For details, see `Make a space - discoverable to a target + messages in the space. If unset, only users or Google Groups + who have been individually invited or added to the space can + access it. For details, see `Make a space discoverable to a + target audience `__. Format: ``audiences/{audience}`` To use the default target audience for the Google Workspace organization, set to ``audiences/default``. + + This field is not populated when using the ``chat.bot`` + scope with `app + authentication `__. """ class AccessState(proto.Enum): @@ -259,12 +339,17 @@ class AccessState(proto.Enum): Access state is unknown or not supported in this API. PRIVATE (1): - Space is discoverable by added or invited - members or groups. + Only users or Google Groups that have been + individually added or invited by other users or + Google Workspace administrators can discover and + access the space. DISCOVERABLE (2): - Space is discoverable by the selected `target - audience `__, - as well as added or invited members or groups. + A space manager has granted a target audience access to the + space. Users or Google Groups that have been individually + added or invited to the space can also discover and access + the space. To learn more, see `Make a space discoverable to + specific + users `__. """ ACCESS_STATE_UNSPECIFIED = 0 PRIVATE = 1 @@ -280,6 +365,125 @@ class AccessState(proto.Enum): number=3, ) + class PermissionSettings(proto.Message): + r"""`Permission + settings `__ that + you can specify when updating an existing named space. + + To set permission settings when creating a space, specify the + ``PredefinedPermissionSettings`` field in your request. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + manage_members_and_groups (google.apps.chat_v1.types.Space.PermissionSetting): + Setting for managing members and groups in a + space. 
+ + This field is a member of `oneof`_ ``_manage_members_and_groups``. + modify_space_details (google.apps.chat_v1.types.Space.PermissionSetting): + Setting for updating space name, avatar, + description and guidelines. + + This field is a member of `oneof`_ ``_modify_space_details``. + toggle_history (google.apps.chat_v1.types.Space.PermissionSetting): + Setting for toggling space history on and + off. + + This field is a member of `oneof`_ ``_toggle_history``. + use_at_mention_all (google.apps.chat_v1.types.Space.PermissionSetting): + Setting for using @all in a space. + + This field is a member of `oneof`_ ``_use_at_mention_all``. + manage_apps (google.apps.chat_v1.types.Space.PermissionSetting): + Setting for managing apps in a space. + + This field is a member of `oneof`_ ``_manage_apps``. + manage_webhooks (google.apps.chat_v1.types.Space.PermissionSetting): + Setting for managing webhooks in a space. + + This field is a member of `oneof`_ ``_manage_webhooks``. + post_messages (google.apps.chat_v1.types.Space.PermissionSetting): + Output only. Setting for posting messages in + a space. + + This field is a member of `oneof`_ ``_post_messages``. + reply_messages (google.apps.chat_v1.types.Space.PermissionSetting): + Setting for replying to messages in a space. + + This field is a member of `oneof`_ ``_reply_messages``. 
+ """ + + manage_members_and_groups: "Space.PermissionSetting" = proto.Field( + proto.MESSAGE, + number=1, + optional=True, + message="Space.PermissionSetting", + ) + modify_space_details: "Space.PermissionSetting" = proto.Field( + proto.MESSAGE, + number=2, + optional=True, + message="Space.PermissionSetting", + ) + toggle_history: "Space.PermissionSetting" = proto.Field( + proto.MESSAGE, + number=3, + optional=True, + message="Space.PermissionSetting", + ) + use_at_mention_all: "Space.PermissionSetting" = proto.Field( + proto.MESSAGE, + number=4, + optional=True, + message="Space.PermissionSetting", + ) + manage_apps: "Space.PermissionSetting" = proto.Field( + proto.MESSAGE, + number=5, + optional=True, + message="Space.PermissionSetting", + ) + manage_webhooks: "Space.PermissionSetting" = proto.Field( + proto.MESSAGE, + number=6, + optional=True, + message="Space.PermissionSetting", + ) + post_messages: "Space.PermissionSetting" = proto.Field( + proto.MESSAGE, + number=7, + optional=True, + message="Space.PermissionSetting", + ) + reply_messages: "Space.PermissionSetting" = proto.Field( + proto.MESSAGE, + number=8, + optional=True, + message="Space.PermissionSetting", + ) + + class PermissionSetting(proto.Message): + r"""Represents a space permission setting. + + Attributes: + managers_allowed (bool): + Whether spaces managers have this permission. + members_allowed (bool): + Whether non-manager members have this + permission. 
+ """ + + managers_allowed: bool = proto.Field( + proto.BOOL, + number=1, + ) + members_allowed: bool = proto.Field( + proto.BOOL, + number=2, + ) + name: str = proto.Field( proto.STRING, number=1, @@ -334,10 +538,20 @@ class AccessState(proto.Enum): number=17, message=timestamp_pb2.Timestamp, ) + last_active_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=18, + message=timestamp_pb2.Timestamp, + ) admin_installed: bool = proto.Field( proto.BOOL, number=19, ) + membership_count: MembershipCount = proto.Field( + proto.MESSAGE, + number=20, + message=MembershipCount, + ) access_settings: AccessSettings = proto.Field( proto.MESSAGE, number=23, @@ -347,20 +561,37 @@ class AccessState(proto.Enum): proto.STRING, number=25, ) + predefined_permission_settings: PredefinedPermissionSettings = proto.Field( + proto.ENUM, + number=26, + oneof="space_permission_settings", + enum=PredefinedPermissionSettings, + ) + permission_settings: PermissionSettings = proto.Field( + proto.MESSAGE, + number=27, + oneof="space_permission_settings", + message=PermissionSettings, + ) class CreateSpaceRequest(proto.Message): - r"""A request to create a named space. + r"""A request to create a named space with no members. Attributes: space (google.apps.chat_v1.types.Space): Required. The ``displayName`` and ``spaceType`` fields must be populated. Only ``SpaceType.SPACE`` is supported. - If you receive the error message ``ALREADY_EXISTS`` when - creating a space, try a different ``displayName``. An - existing space within the Google Workspace organization - might already use this display name. + If you receive the error message ``ALREADY_EXISTS``, try a + different ``displayName``. An existing space within the + Google Workspace organization might already use this display + name. + + If you're a member of the `Developer Preview + program `__, + ``SpaceType.GROUP_CHAT`` can be used if ``importMode`` is + set to true. 
The space ``name`` is assigned on the server so anything specified in this field will be ignored. @@ -449,8 +680,9 @@ class ListSpacesResponse(proto.Message): Attributes: spaces (MutableSequence[google.apps.chat_v1.types.Space]): - List of spaces in the requested (or first) - page. + List of spaces in the requested (or first) page. Note: The + ``permissionSettings`` field is not returned in the Space + object for list requests. next_page_token (str): You can send a token as ``pageToken`` to retrieve the next page of results. If empty, there are no subsequent pages. @@ -480,12 +712,27 @@ class GetSpaceRequest(proto.Message): ``spaces/{space}``. Format: ``spaces/{space}`` + use_admin_access (bool): + When ``true``, the method runs using the user's Google + Workspace administrator privileges. + + The calling user must be a Google Workspace administrator + with the `manage chat and spaces conversations + privilege `__. + + Requires the ``chat.admin.spaces`` or + ``chat.admin.spaces.readonly`` `OAuth 2.0 + scopes `__. """ name: str = proto.Field( proto.STRING, number=1, ) + use_admin_access: bool = proto.Field( + proto.BOOL, + number=2, + ) class FindDirectMessageRequest(proto.Message): @@ -530,67 +777,82 @@ class UpdateSpaceRequest(proto.Message): Required. The updated field paths, comma separated if there are multiple. - Currently supported field paths: - - - ``display_name`` (Only supports changing the display name - of a space with the ``SPACE`` type, or when also - including the ``space_type`` mask to change a - ``GROUP_CHAT`` space type to ``SPACE``. Trying to update - the display name of a ``GROUP_CHAT`` or a - ``DIRECT_MESSAGE`` space results in an invalid argument - error. If you receive the error message - ``ALREADY_EXISTS`` when updating the ``displayName``, try - a different ``displayName``. An existing space within the - Google Workspace organization might already use this - display name.) 
- - - ``space_type`` (Only supports changing a ``GROUP_CHAT`` - space type to ``SPACE``. Include ``display_name`` - together with ``space_type`` in the update mask and - ensure that the specified space has a non-empty display - name and the ``SPACE`` space type. Including the - ``space_type`` mask and the ``SPACE`` type in the - specified space when updating the display name is - optional if the existing space already has the ``SPACE`` - type. Trying to update the space type in other ways - results in an invalid argument error). ``space_type`` is - not supported with admin access. - - - ``space_details`` - - - ``space_history_state`` (Supports `turning history on or - off for the - space `__ - if `the organization allows users to change their history - setting `__. - Warning: mutually exclusive with all other field paths.) - ``space_history_state`` is not supported with admin - access. - - - ``access_settings.audience`` (Supports changing the - `access - setting `__ - of who can discover the space, join the space, and - preview the messages in space. If no audience is - specified in the access setting, the space's access - setting is updated to private. Warning: mutually - exclusive with all other field paths.) - ``access_settings.audience`` is not supported with admin - access. - - - Developer Preview: Supports changing the `permission - settings `__ - of a space, supported field paths include: - ``permission_settings.manage_members_and_groups``, - ``permission_settings.modify_space_details``, - ``permission_settings.toggle_history``, - ``permission_settings.use_at_mention_all``, - ``permission_settings.manage_apps``, - ``permission_settings.manage_webhooks``, - ``permission_settings.reply_messages`` (Warning: mutually - exclusive with all other non-permission settings field - paths). ``permission_settings`` is not supported with - admin access. + You can update the following fields for a space: + + ``space_details``: Updates the space's description. 
Supports + up to 150 characters. + + ``display_name``: Only supports updating the display name + for spaces where ``spaceType`` field is ``SPACE``. If you + receive the error message ``ALREADY_EXISTS``, try a + different value. An existing space within the Google + Workspace organization might already use this display name. + + ``space_type``: Only supports changing a ``GROUP_CHAT`` + space type to ``SPACE``. Include ``display_name`` together + with ``space_type`` in the update mask and ensure that the + specified space has a non-empty display name and the + ``SPACE`` space type. Including the ``space_type`` mask and + the ``SPACE`` type in the specified space when updating the + display name is optional if the existing space already has + the ``SPACE`` type. Trying to update the space type in other + ways results in an invalid argument error. ``space_type`` is + not supported with ``useAdminAccess``. + + ``space_history_state``: Updates `space history + settings `__ + by turning history on or off for the space. Only supported + if history settings are enabled for the Google Workspace + organization. To update the space history state, you must + omit all other field masks in your request. + ``space_history_state`` is not supported with + ``useAdminAccess``. + + ``access_settings.audience``: Updates the `access + setting `__ + of who can discover the space, join the space, and preview + the messages in named space where ``spaceType`` field is + ``SPACE``. If the existing space has a target audience, you + can remove the audience and restrict space access by + omitting a value for this field mask. To update access + settings for a space, the authenticating user must be a + space manager and omit all other field masks in your + request. You can't update this field if the space is in + `import + mode `__. + To learn more, see `Make a space discoverable to specific + users `__. + ``access_settings.audience`` is not supported with + ``useAdminAccess``. 
+ + ``permission_settings``: Supports changing the `permission + settings `__ + of a space. When updating permission settings, you can only + specify ``permissionSettings`` field masks; you cannot + update other field masks at the same time. + ``permissionSettings`` is not supported with + ``useAdminAccess``. The supported field masks include: + + - ``permission_settings.manageMembersAndGroups`` + - ``permission_settings.modifySpaceDetails`` + - ``permission_settings.toggleHistory`` + - ``permission_settings.useAtMentionAll`` + - ``permission_settings.manageApps`` + - ``permission_settings.manageWebhooks`` + - ``permission_settings.replyMessages`` + use_admin_access (bool): + When ``true``, the method runs using the user's Google + Workspace administrator privileges. + + The calling user must be a Google Workspace administrator + with the `manage chat and spaces conversations + privilege `__. + + Requires the ``chat.admin.spaces`` `OAuth 2.0 + scope `__. + + Some ``FieldMask`` values are not supported using admin + access. For details, see the description of ``update_mask``. """ space: "Space" = proto.Field( @@ -603,6 +865,209 @@ class UpdateSpaceRequest(proto.Message): number=2, message=field_mask_pb2.FieldMask, ) + use_admin_access: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class SearchSpacesRequest(proto.Message): + r"""Request to search for a list of spaces based on a query. + + Attributes: + use_admin_access (bool): + When ``true``, the method runs using the user's Google + Workspace administrator privileges. + + The calling user must be a Google Workspace administrator + with the `manage chat and spaces conversations + privilege `__. + + Requires either the ``chat.admin.spaces.readonly`` or + ``chat.admin.spaces`` `OAuth 2.0 + scope `__. + + This method currently only supports admin access, thus only + ``true`` is accepted for this field. + page_size (int): + The maximum number of spaces to return. The + service may return fewer than this value. 
+ + If unspecified, at most 100 spaces are returned. + + The maximum value is 1000. If you use a value + more than 1000, it's automatically changed to + 1000. + page_token (str): + A token, received from the previous search + spaces call. Provide this parameter to retrieve + the subsequent page. + + When paginating, all other parameters provided + should match the call that provided the page + token. Passing different values to the other + parameters might lead to unexpected results. + query (str): + Required. A search query. + + You can search by using the following parameters: + + - ``create_time`` + - ``customer`` + - ``display_name`` + - ``external_user_allowed`` + - ``last_active_time`` + - ``space_history_state`` + - ``space_type`` + + ``create_time`` and ``last_active_time`` accept a timestamp + in `RFC-3339 `__ + format and the supported comparison operators are: ``=``, + ``<``, ``>``, ``<=``, ``>=``. + + ``customer`` is required and is used to indicate which + customer to fetch spaces from. ``customers/my_customer`` is + the only supported value. + + ``display_name`` only accepts the ``HAS`` (``:``) operator. + The text to match is first tokenized into tokens and each + token is prefix-matched case-insensitively and independently + as a substring anywhere in the space's ``display_name``. For + example, ``Fun Eve`` matches ``Fun event`` or + ``The evening was fun``, but not ``notFun event`` or + ``even``. + + ``external_user_allowed`` accepts either ``true`` or + ``false``. + + ``space_history_state`` only accepts values from the + [``historyState``] + (https://developers.google.com/workspace/chat/api/reference/rest/v1/spaces#Space.HistoryState) + field of a ``space`` resource. + + ``space_type`` is required and the only valid value is + ``SPACE``. + + Across different fields, only ``AND`` operators are + supported. 
A valid example is + ``space_type = "SPACE" AND display_name:"Hello"`` and an + invalid example is + ``space_type = "SPACE" OR display_name:"Hello"``. + + Among the same field, ``space_type`` doesn't support ``AND`` + or ``OR`` operators. ``display_name``, + 'space_history_state', and 'external_user_allowed' only + support ``OR`` operators. ``last_active_time`` and + ``create_time`` support both ``AND`` and ``OR`` operators. + ``AND`` can only be used to represent an interval, such as + ``last_active_time < "2022-01-01T00:00:00+00:00" AND last_active_time > "2023-01-01T00:00:00+00:00"``. + + The following example queries are valid: + + :: + + customer = "customers/my_customer" AND space_type = "SPACE" + + customer = "customers/my_customer" AND space_type = "SPACE" AND + display_name:"Hello World" + + customer = "customers/my_customer" AND space_type = "SPACE" AND + (last_active_time < "2020-01-01T00:00:00+00:00" OR last_active_time > + "2022-01-01T00:00:00+00:00") + + customer = "customers/my_customer" AND space_type = "SPACE" AND + (display_name:"Hello World" OR display_name:"Fun event") AND + (last_active_time > "2020-01-01T00:00:00+00:00" AND last_active_time < + "2022-01-01T00:00:00+00:00") + + customer = "customers/my_customer" AND space_type = "SPACE" AND + (create_time > "2019-01-01T00:00:00+00:00" AND create_time < + "2020-01-01T00:00:00+00:00") AND (external_user_allowed = "true") AND + (space_history_state = "HISTORY_ON" OR space_history_state = "HISTORY_OFF") + order_by (str): + Optional. How the list of spaces is ordered. + + Supported attributes to order by are: + + - ``membership_count.joined_direct_human_user_count`` — + Denotes the count of human users that have directly + joined a space. + - ``last_active_time`` — Denotes the time when last + eligible item is added to any topic of this space. + - ``create_time`` — Denotes the time of the space creation. + + Valid ordering operation values are: + + - ``ASC`` for ascending. Default value. 
+ + - ``DESC`` for descending. + + The supported syntax are: + + - ``membership_count.joined_direct_human_user_count DESC`` + - ``membership_count.joined_direct_human_user_count ASC`` + - ``last_active_time DESC`` + - ``last_active_time ASC`` + - ``create_time DESC`` + - ``create_time ASC`` + """ + + use_admin_access: bool = proto.Field( + proto.BOOL, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + query: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class SearchSpacesResponse(proto.Message): + r"""Response with a list of spaces corresponding to the search + spaces request. + + Attributes: + spaces (MutableSequence[google.apps.chat_v1.types.Space]): + A page of the requested spaces. + next_page_token (str): + A token that can be used to retrieve the next + page. If this field is empty, there are no + subsequent pages. + total_size (int): + The total number of spaces that match the + query, across all pages. If the result is over + 10,000 spaces, this value is an estimate. + """ + + @property + def raw_page(self): + return self + + spaces: MutableSequence["Space"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Space", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + total_size: int = proto.Field( + proto.INT32, + number=3, + ) class DeleteSpaceRequest(proto.Message): @@ -613,12 +1078,26 @@ class DeleteSpaceRequest(proto.Message): Required. Resource name of the space to delete. Format: ``spaces/{space}`` + use_admin_access (bool): + When ``true``, the method runs using the user's Google + Workspace administrator privileges. + + The calling user must be a Google Workspace administrator + with the `manage chat and spaces conversations + privilege `__. + + Requires the ``chat.admin.delete`` `OAuth 2.0 + scope `__. 
""" name: str = proto.Field( proto.STRING, number=1, ) + use_admin_access: bool = proto.Field( + proto.BOOL, + number=2, + ) class CompleteImportSpaceRequest(proto.Message): diff --git a/packages/google-apps-chat/google/apps/chat_v1/types/space_event.py b/packages/google-apps-chat/google/apps/chat_v1/types/space_event.py index 96591e800225..17bf07ba27d7 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/types/space_event.py +++ b/packages/google-apps-chat/google/apps/chat_v1/types/space_event.py @@ -492,8 +492,9 @@ class ListSpaceEventsResponse(proto.Message): Attributes: space_events (MutableSequence[google.apps.chat_v1.types.SpaceEvent]): - Results are returned in chronological order - (oldest event first). + Results are returned in chronological order (oldest event + first). Note: The ``permissionSettings`` field is not + returned in the Space object for list requests. next_page_token (str): Continuation token used to fetch more events. If this field is omitted, there are no diff --git a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_create_space_async.py b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_create_space_async.py index 047b4031cee0..b72fb46759d1 100644 --- a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_create_space_async.py +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_create_space_async.py @@ -39,7 +39,11 @@ async def sample_create_space(): client = chat_v1.ChatServiceAsyncClient() # Initialize request argument(s) + space = chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.CreateSpaceRequest( + space=space, ) # Make the request diff --git a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_create_space_sync.py 
b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_create_space_sync.py index 845ce548ddc0..083a0684ae1e 100644 --- a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_create_space_sync.py +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_create_space_sync.py @@ -39,7 +39,11 @@ def sample_create_space(): client = chat_v1.ChatServiceClient() # Initialize request argument(s) + space = chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.CreateSpaceRequest( + space=space, ) # Make the request diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_apis_async.py b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_search_spaces_async.py similarity index 76% rename from packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_apis_async.py rename to packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_search_spaces_async.py index 5130bd9cbca6..3d25def75a1b 100644 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_apis_async.py +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_search_spaces_async.py @@ -15,15 +15,15 @@ # # Generated code. DO NOT EDIT! # -# Snippet for ListApis +# Snippet for SearchSpaces # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. 
# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub +# python3 -m pip install google-apps-chat -# [START apihub_v1_generated_ApiHub_ListApis_async] +# [START chat_v1_generated_ChatService_SearchSpaces_async] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -31,23 +31,23 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 +from google.apps import chat_v1 -async def sample_list_apis(): +async def sample_search_spaces(): # Create a client - client = apihub_v1.ApiHubAsyncClient() + client = chat_v1.ChatServiceAsyncClient() # Initialize request argument(s) - request = apihub_v1.ListApisRequest( - parent="parent_value", + request = chat_v1.SearchSpacesRequest( + query="query_value", ) # Make the request - page_result = client.list_apis(request=request) + page_result = client.search_spaces(request=request) # Handle the response async for response in page_result: print(response) -# [END apihub_v1_generated_ApiHub_ListApis_async] +# [END chat_v1_generated_ChatService_SearchSpaces_async] diff --git a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_search_spaces_sync.py b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_search_spaces_sync.py new file mode 100644 index 000000000000..52e32c45ead5 --- /dev/null +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_search_spaces_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SearchSpaces +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-chat + + +# [START chat_v1_generated_ChatService_SearchSpaces_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import chat_v1 + + +def sample_search_spaces(): + # Create a client + client = chat_v1.ChatServiceClient() + + # Initialize request argument(s) + request = chat_v1.SearchSpacesRequest( + query="query_value", + ) + + # Make the request + page_result = client.search_spaces(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END chat_v1_generated_ChatService_SearchSpaces_sync] diff --git a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_set_up_space_async.py b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_set_up_space_async.py index 6870b8744a05..8e2b99a025ad 100644 --- a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_set_up_space_async.py +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_set_up_space_async.py @@ -39,7 +39,11 @@ async def sample_set_up_space(): client = chat_v1.ChatServiceAsyncClient() # Initialize request argument(s) + space = chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.SetUpSpaceRequest( + space=space, ) # Make the request diff --git a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_set_up_space_sync.py b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_set_up_space_sync.py index 4d27131a243c..61ad37d3d045 100644 --- a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_set_up_space_sync.py +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_set_up_space_sync.py @@ -39,7 +39,11 @@ def sample_set_up_space(): client = chat_v1.ChatServiceClient() # Initialize request argument(s) + space 
= chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.SetUpSpaceRequest( + space=space, ) # Make the request diff --git a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_update_space_async.py b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_update_space_async.py index 14e0944aaaeb..2c2a593b22a2 100644 --- a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_update_space_async.py +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_update_space_async.py @@ -39,7 +39,11 @@ async def sample_update_space(): client = chat_v1.ChatServiceAsyncClient() # Initialize request argument(s) + space = chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.UpdateSpaceRequest( + space=space, ) # Make the request diff --git a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_update_space_sync.py b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_update_space_sync.py index a5158f7a9e07..362d50feb376 100644 --- a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_update_space_sync.py +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_update_space_sync.py @@ -39,7 +39,11 @@ def sample_update_space(): client = chat_v1.ChatServiceClient() # Initialize request argument(s) + space = chat_v1.Space() + space.predefined_permission_settings = "ANNOUNCEMENT_SPACE" + request = chat_v1.UpdateSpaceRequest( + space=space, ) # Make the request diff --git a/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json b/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json index 6e637d46d014..8e15b6a7b93b 100644 --- 
a/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json +++ b/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-apps-chat", - "version": "0.1.0" + "version": "0.1.12" }, "snippets": [ { @@ -728,12 +728,12 @@ "regionTag": "chat_v1_generated_ChatService_CreateSpace_async", "segments": [ { - "end": 50, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 54, "start": 27, "type": "SHORT" }, @@ -743,18 +743,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], @@ -808,12 +808,12 @@ "regionTag": "chat_v1_generated_ChatService_CreateSpace_sync", "segments": [ { - "end": 50, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 54, "start": 27, "type": "SHORT" }, @@ -823,18 +823,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], @@ -3551,6 +3551,159 @@ ], "title": "chat_v1_generated_chat_service_list_spaces_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.apps.chat_v1.ChatServiceAsyncClient", + "shortName": "ChatServiceAsyncClient" + }, + "fullName": "google.apps.chat_v1.ChatServiceAsyncClient.search_spaces", + "method": { + "fullName": "google.chat.v1.ChatService.SearchSpaces", + "service": { + "fullName": "google.chat.v1.ChatService", + "shortName": "ChatService" + }, + "shortName": "SearchSpaces" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.apps.chat_v1.types.SearchSpacesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.apps.chat_v1.services.chat_service.pagers.SearchSpacesAsyncPager", + "shortName": "search_spaces" + }, + "description": "Sample for SearchSpaces", + "file": "chat_v1_generated_chat_service_search_spaces_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chat_v1_generated_ChatService_SearchSpaces_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chat_v1_generated_chat_service_search_spaces_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.apps.chat_v1.ChatServiceClient", + "shortName": "ChatServiceClient" + }, + "fullName": "google.apps.chat_v1.ChatServiceClient.search_spaces", + "method": { + "fullName": "google.chat.v1.ChatService.SearchSpaces", + "service": { + "fullName": "google.chat.v1.ChatService", + "shortName": "ChatService" + }, + "shortName": "SearchSpaces" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.chat_v1.types.SearchSpacesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.apps.chat_v1.services.chat_service.pagers.SearchSpacesPager", + "shortName": "search_spaces" + }, + "description": "Sample for SearchSpaces", + "file": 
"chat_v1_generated_chat_service_search_spaces_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chat_v1_generated_ChatService_SearchSpaces_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chat_v1_generated_chat_service_search_spaces_sync.py" + }, { "canonical": true, "clientMethod": { @@ -3596,12 +3749,12 @@ "regionTag": "chat_v1_generated_ChatService_SetUpSpace_async", "segments": [ { - "end": 50, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 54, "start": 27, "type": "SHORT" }, @@ -3611,18 +3764,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], @@ -3672,12 +3825,12 @@ "regionTag": "chat_v1_generated_ChatService_SetUpSpace_sync", "segments": [ { - "end": 50, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 54, "start": 27, "type": "SHORT" }, @@ -3687,18 +3840,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], @@ -4264,12 +4417,12 @@ "regionTag": "chat_v1_generated_ChatService_UpdateSpace_async", "segments": [ { - "end": 50, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 54, "start": 27, "type": "SHORT" }, @@ -4279,18 
+4432,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], @@ -4348,12 +4501,12 @@ "regionTag": "chat_v1_generated_ChatService_UpdateSpace_sync", "segments": [ { - "end": 50, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 54, "start": 27, "type": "SHORT" }, @@ -4363,18 +4516,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], diff --git a/packages/google-apps-chat/scripts/fixup_chat_v1_keywords.py b/packages/google-apps-chat/scripts/fixup_chat_v1_keywords.py index 6dfa6d4d93f8..18fea55c4198 100644 --- a/packages/google-apps-chat/scripts/fixup_chat_v1_keywords.py +++ b/packages/google-apps-chat/scripts/fixup_chat_v1_keywords.py @@ -40,31 +40,32 @@ class chatCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'complete_import_space': ('name', ), - 'create_membership': ('parent', 'membership', ), + 'create_membership': ('parent', 'membership', 'use_admin_access', ), 'create_message': ('parent', 'message', 'thread_key', 'request_id', 'message_reply_option', 'message_id', ), 'create_reaction': ('parent', 'reaction', ), 'create_space': ('space', 'request_id', ), - 'delete_membership': ('name', ), + 'delete_membership': ('name', 'use_admin_access', ), 'delete_message': ('name', 'force', ), 'delete_reaction': ('name', ), - 'delete_space': ('name', ), + 'delete_space': ('name', 'use_admin_access', ), 'find_direct_message': ('name', ), 'get_attachment': ('name', ), - 'get_membership': 
('name', ), + 'get_membership': ('name', 'use_admin_access', ), 'get_message': ('name', ), - 'get_space': ('name', ), + 'get_space': ('name', 'use_admin_access', ), 'get_space_event': ('name', ), 'get_space_read_state': ('name', ), 'get_thread_read_state': ('name', ), - 'list_memberships': ('parent', 'page_size', 'page_token', 'filter', 'show_groups', 'show_invited', ), + 'list_memberships': ('parent', 'page_size', 'page_token', 'filter', 'show_groups', 'show_invited', 'use_admin_access', ), 'list_messages': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'show_deleted', ), 'list_reactions': ('parent', 'page_size', 'page_token', 'filter', ), 'list_space_events': ('parent', 'filter', 'page_size', 'page_token', ), 'list_spaces': ('page_size', 'page_token', 'filter', ), + 'search_spaces': ('query', 'use_admin_access', 'page_size', 'page_token', 'order_by', ), 'set_up_space': ('space', 'request_id', 'memberships', ), - 'update_membership': ('membership', 'update_mask', ), + 'update_membership': ('membership', 'update_mask', 'use_admin_access', ), 'update_message': ('message', 'update_mask', 'allow_missing', ), - 'update_space': ('space', 'update_mask', ), + 'update_space': ('space', 'update_mask', 'use_admin_access', ), 'update_space_read_state': ('space_read_state', 'update_mask', ), 'upload_attachment': ('parent', 'filename', ), } diff --git a/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py b/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py index 27ad0d358b7d..893992dd3ab5 100644 --- a/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py +++ b/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py @@ -5289,6 +5289,428 @@ async def test_list_spaces_async_pages(): assert page_.raw_page.next_page_token == token +@pytest.mark.parametrize( + "request_type", + [ + space.SearchSpacesRequest, + dict, + ], +) +def test_search_spaces(request_type, transport: str = "grpc"): + client = 
ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search_spaces), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = space.SearchSpacesResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + response = client.search_spaces(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = space.SearchSpacesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.SearchSpacesPager) + assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 + + +def test_search_spaces_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search_spaces), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.search_spaces() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == space.SearchSpacesRequest() + + +def test_search_spaces_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = space.SearchSpacesRequest( + page_token="page_token_value", + query="query_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search_spaces), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.search_spaces(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == space.SearchSpacesRequest( + page_token="page_token_value", + query="query_value", + order_by="order_by_value", + ) + + +def test_search_spaces_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.search_spaces in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.search_spaces] = mock_rpc + request = {} + client.search_spaces(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.search_spaces(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_search_spaces_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ChatServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search_spaces), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + space.SearchSpacesResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + ) + response = await client.search_spaces() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == space.SearchSpacesRequest() + + +@pytest.mark.asyncio +async def test_search_spaces_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ChatServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.search_spaces + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.search_spaces + ] = mock_rpc + + request = {} + await 
client.search_spaces(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.search_spaces(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_search_spaces_async( + transport: str = "grpc_asyncio", request_type=space.SearchSpacesRequest +): + client = ChatServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search_spaces), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + space.SearchSpacesResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + ) + response = await client.search_spaces(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = space.SearchSpacesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.SearchSpacesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 + + +@pytest.mark.asyncio +async def test_search_spaces_async_from_dict(): + await test_search_spaces_async(request_type=dict) + + +def test_search_spaces_pager(transport_name: str = "grpc"): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.search_spaces), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + space.SearchSpacesResponse( + spaces=[ + space.Space(), + space.Space(), + space.Space(), + ], + next_page_token="abc", + ), + space.SearchSpacesResponse( + spaces=[], + next_page_token="def", + ), + space.SearchSpacesResponse( + spaces=[ + space.Space(), + ], + next_page_token="ghi", + ), + space.SearchSpacesResponse( + spaces=[ + space.Space(), + space.Space(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + pager = client.search_spaces(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, space.Space) for i in results) + + +def test_search_spaces_pages(transport_name: str = "grpc"): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.search_spaces), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + space.SearchSpacesResponse( + spaces=[ + space.Space(), + space.Space(), + space.Space(), + ], + next_page_token="abc", + ), + space.SearchSpacesResponse( + spaces=[], + next_page_token="def", + ), + space.SearchSpacesResponse( + spaces=[ + space.Space(), + ], + next_page_token="ghi", + ), + space.SearchSpacesResponse( + spaces=[ + space.Space(), + space.Space(), + ], + ), + RuntimeError, + ) + pages = list(client.search_spaces(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_search_spaces_async_pager(): + client = ChatServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_spaces), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + space.SearchSpacesResponse( + spaces=[ + space.Space(), + space.Space(), + space.Space(), + ], + next_page_token="abc", + ), + space.SearchSpacesResponse( + spaces=[], + next_page_token="def", + ), + space.SearchSpacesResponse( + spaces=[ + space.Space(), + ], + next_page_token="ghi", + ), + space.SearchSpacesResponse( + spaces=[ + space.Space(), + space.Space(), + ], + ), + RuntimeError, + ) + async_pager = await client.search_spaces( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, space.Space) for i in responses) + + +@pytest.mark.asyncio +async def test_search_spaces_async_pages(): + client = ChatServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.search_spaces), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + space.SearchSpacesResponse( + spaces=[ + space.Space(), + space.Space(), + space.Space(), + ], + next_page_token="abc", + ), + space.SearchSpacesResponse( + spaces=[], + next_page_token="def", + ), + space.SearchSpacesResponse( + spaces=[ + space.Space(), + ], + next_page_token="ghi", + ), + space.SearchSpacesResponse( + spaces=[ + space.Space(), + space.Space(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.search_spaces(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + @pytest.mark.parametrize( "request_type", [ @@ -5322,6 +5744,7 @@ def test_get_space(request_type, transport: str = "grpc"): import_mode=True, admin_installed=True, space_uri="space_uri_value", + predefined_permission_settings=space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, ) response = client.get_space(request) @@ -5742,6 +6165,7 @@ def test_create_space(request_type, transport: str = "grpc"): import_mode=True, admin_installed=True, space_uri="space_uri_value", + predefined_permission_settings=gc_space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, ) response = client.create_space(request) @@ -6105,6 +6529,7 @@ def test_set_up_space(request_type, transport: str = "grpc"): import_mode=True, admin_installed=True, space_uri="space_uri_value", + predefined_permission_settings=space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, ) response = client.set_up_space(request) @@ -6388,6 +6813,7 @@ def test_update_space(request_type, transport: str = 
"grpc"): import_mode=True, admin_installed=True, space_uri="space_uri_value", + predefined_permission_settings=gc_space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, ) response = client.update_space(request) @@ -7461,6 +7887,7 @@ def test_find_direct_message(request_type, transport: str = "grpc"): import_mode=True, admin_installed=True, space_uri="space_uri_value", + predefined_permission_settings=space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, ) response = client.find_direct_message(request) @@ -12657,6 +13084,11 @@ def test_create_message_rest(request_type): "drive_data_ref": {"drive_file_id": "drive_file_id_value"}, "mime_type": "mime_type_value", }, + "chat_space_link_data": { + "space": "space_value", + "thread": "thread_value", + "message": "message_value", + }, }, } ], @@ -12677,9 +13109,28 @@ def test_create_message_rest(request_type): "space_history_state": 1, "import_mode": True, "create_time": {}, + "last_active_time": {}, "admin_installed": True, + "membership_count": { + "joined_direct_human_user_count": 3185, + "joined_group_count": 1933, + }, "access_settings": {"access_state": 1, "audience": "audience_value"}, "space_uri": "space_uri_value", + "predefined_permission_settings": 1, + "permission_settings": { + "manage_members_and_groups": { + "managers_allowed": True, + "members_allowed": True, + }, + "modify_space_details": {}, + "toggle_history": {}, + "use_at_mention_all": {}, + "manage_apps": {}, + "manage_webhooks": {}, + "post_messages": {}, + "reply_messages": {}, + }, }, "fallback_text": "fallback_text_value", "action_response": { @@ -13607,6 +14058,7 @@ def test_list_memberships_rest_required_fields( "page_token", "show_groups", "show_invited", + "use_admin_access", ) ) jsonified_request.update(unset_fields) @@ -13670,6 +14122,7 @@ def test_list_memberships_rest_unset_required_fields(): "pageToken", "showGroups", "showInvited", + "useAdminAccess", ) ) & set(("parent",)) @@ -13982,6 +14435,8 @@ def 
test_get_membership_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() ).get_membership._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("use_admin_access",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -14035,7 +14490,7 @@ def test_get_membership_rest_unset_required_fields(): ) unset_fields = transport.get_membership._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + assert set(unset_fields) == (set(("useAdminAccess",)) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -14790,6 +15245,11 @@ def test_update_message_rest(request_type): "drive_data_ref": {"drive_file_id": "drive_file_id_value"}, "mime_type": "mime_type_value", }, + "chat_space_link_data": { + "space": "space_value", + "thread": "thread_value", + "message": "message_value", + }, }, } ], @@ -14810,9 +15270,28 @@ def test_update_message_rest(request_type): "space_history_state": 1, "import_mode": True, "create_time": {}, + "last_active_time": {}, "admin_installed": True, + "membership_count": { + "joined_direct_human_user_count": 3185, + "joined_group_count": 1933, + }, "access_settings": {"access_state": 1, "audience": "audience_value"}, "space_uri": "space_uri_value", + "predefined_permission_settings": 1, + "permission_settings": { + "manage_members_and_groups": { + "managers_allowed": True, + "members_allowed": True, + }, + "modify_space_details": {}, + "toggle_history": {}, + "use_at_mention_all": {}, + "manage_apps": {}, + "manage_webhooks": {}, + "post_messages": {}, + "reply_messages": {}, + }, }, "fallback_text": "fallback_text_value", "action_response": { @@ -16003,7 +16482,170 @@ def test_upload_attachment_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) -def 
test_upload_attachment_rest_interceptors(null_interceptor): +def test_upload_attachment_rest_interceptors(null_interceptor): + transport = transports.ChatServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ChatServiceRestInterceptor(), + ) + client = ChatServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ChatServiceRestInterceptor, "post_upload_attachment" + ) as post, mock.patch.object( + transports.ChatServiceRestInterceptor, "pre_upload_attachment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = attachment.UploadAttachmentRequest.pb( + attachment.UploadAttachmentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = attachment.UploadAttachmentResponse.to_json( + attachment.UploadAttachmentResponse() + ) + + request = attachment.UploadAttachmentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = attachment.UploadAttachmentResponse() + + client.upload_attachment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_upload_attachment_rest_bad_request( + transport: str = "rest", request_type=attachment.UploadAttachmentRequest +): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "spaces/sample1"} + request = request_type(**request_init) + + # Mock the 
http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.upload_attachment(request) + + +def test_upload_attachment_rest_error(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + space.ListSpacesRequest, + dict, + ], +) +def test_list_spaces_rest(request_type): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = space.ListSpacesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = space.ListSpacesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_spaces(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListSpacesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_spaces_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_spaces in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_spaces] = mock_rpc + + request = {} + client.list_spaces(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_spaces(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_spaces_rest_interceptors(null_interceptor): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -16016,15 +16658,13 @@ def test_upload_attachment_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ChatServiceRestInterceptor, "post_upload_attachment" + transports.ChatServiceRestInterceptor, "post_list_spaces" ) as post, mock.patch.object( - transports.ChatServiceRestInterceptor, "pre_upload_attachment" + transports.ChatServiceRestInterceptor, "pre_list_spaces" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = attachment.UploadAttachmentRequest.pb( - attachment.UploadAttachmentRequest() - ) + pb_message = space.ListSpacesRequest.pb(space.ListSpacesRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -16035,19 +16675,19 @@ def test_upload_attachment_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = attachment.UploadAttachmentResponse.to_json( - attachment.UploadAttachmentResponse() + req.return_value._content = space.ListSpacesResponse.to_json( + space.ListSpacesResponse() ) - request = attachment.UploadAttachmentRequest() + request = space.ListSpacesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = attachment.UploadAttachmentResponse() + post.return_value = space.ListSpacesResponse() - client.upload_attachment( + client.list_spaces( request, metadata=[ ("key", "val"), @@ -16059,8 +16699,8 @@ 
def test_upload_attachment_rest_interceptors(null_interceptor): post.assert_called_once() -def test_upload_attachment_rest_bad_request( - transport: str = "rest", request_type=attachment.UploadAttachmentRequest +def test_list_spaces_rest_bad_request( + transport: str = "rest", request_type=space.ListSpacesRequest ): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16068,7 +16708,7 @@ def test_upload_attachment_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "spaces/sample1"} + request_init = {} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -16080,23 +16720,78 @@ def test_upload_attachment_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.upload_attachment(request) + client.list_spaces(request) -def test_upload_attachment_rest_error(): +def test_list_spaces_rest_pager(transport: str = "rest"): client = ChatServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + space.ListSpacesResponse( + spaces=[ + space.Space(), + space.Space(), + space.Space(), + ], + next_page_token="abc", + ), + space.ListSpacesResponse( + spaces=[], + next_page_token="def", + ), + space.ListSpacesResponse( + spaces=[ + space.Space(), + ], + next_page_token="ghi", + ), + space.ListSpacesResponse( + spaces=[ + space.Space(), + space.Space(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(space.ListSpacesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {} + + pager = client.list_spaces(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, space.Space) for i in results) + + pages = list(client.list_spaces(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - space.ListSpacesRequest, + space.SearchSpacesRequest, dict, ], ) -def test_list_spaces_rest(request_type): +def test_search_spaces_rest(request_type): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -16109,27 +16804,29 @@ def test_list_spaces_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = space.ListSpacesResponse( + return_value = space.SearchSpacesResponse( next_page_token="next_page_token_value", + total_size=1086, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = space.ListSpacesResponse.pb(return_value) + return_value = space.SearchSpacesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_spaces(request) + response = client.search_spaces(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListSpacesPager) + assert isinstance(response, pagers.SearchSpacesPager) assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 -def test_list_spaces_rest_use_cached_wrapped_rpc(): +def test_search_spaces_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16143,30 +16840,141 @@ def test_list_spaces_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_spaces in client._transport._wrapped_methods + assert client._transport.search_spaces in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_spaces] = mock_rpc + client._transport._wrapped_methods[client._transport.search_spaces] = mock_rpc request = {} - client.list_spaces(request) + client.search_spaces(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_spaces(request) + client.search_spaces(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 +def test_search_spaces_rest_required_fields(request_type=space.SearchSpacesRequest): + transport_class = transports.ChatServiceRestTransport + + request_init = {} + request_init["query"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "query" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).search_spaces._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "query" in jsonified_request + assert jsonified_request["query"] == request_init["query"] + + jsonified_request["query"] = "query_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).search_spaces._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "order_by", + "page_size", + "page_token", + "query", + "use_admin_access", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "query" in jsonified_request + assert jsonified_request["query"] == "query_value" + + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = space.SearchSpacesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = space.SearchSpacesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.search_spaces(request) + + expected_params = [ + ( + "query", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_search_spaces_rest_unset_required_fields(): + transport = transports.ChatServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.search_spaces._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "orderBy", + "pageSize", + "pageToken", + "query", + "useAdminAccess", + ) + ) + & set(("query",)) + ) + + @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_spaces_rest_interceptors(null_interceptor): +def test_search_spaces_rest_interceptors(null_interceptor): transport = transports.ChatServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -16179,13 +16987,13 @@ def test_list_spaces_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as 
transcode, mock.patch.object( - transports.ChatServiceRestInterceptor, "post_list_spaces" + transports.ChatServiceRestInterceptor, "post_search_spaces" ) as post, mock.patch.object( - transports.ChatServiceRestInterceptor, "pre_list_spaces" + transports.ChatServiceRestInterceptor, "pre_search_spaces" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = space.ListSpacesRequest.pb(space.ListSpacesRequest()) + pb_message = space.SearchSpacesRequest.pb(space.SearchSpacesRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -16196,19 +17004,19 @@ def test_list_spaces_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = space.ListSpacesResponse.to_json( - space.ListSpacesResponse() + req.return_value._content = space.SearchSpacesResponse.to_json( + space.SearchSpacesResponse() ) - request = space.ListSpacesRequest() + request = space.SearchSpacesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = space.ListSpacesResponse() + post.return_value = space.SearchSpacesResponse() - client.list_spaces( + client.search_spaces( request, metadata=[ ("key", "val"), @@ -16220,8 +17028,8 @@ def test_list_spaces_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_spaces_rest_bad_request( - transport: str = "rest", request_type=space.ListSpacesRequest +def test_search_spaces_rest_bad_request( + transport: str = "rest", request_type=space.SearchSpacesRequest ): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16241,10 +17049,10 @@ def test_list_spaces_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_spaces(request) + client.search_spaces(request) -def test_list_spaces_rest_pager(transport: str = "rest"): +def 
test_search_spaces_rest_pager(transport: str = "rest"): client = ChatServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16256,7 +17064,7 @@ def test_list_spaces_rest_pager(transport: str = "rest"): # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - space.ListSpacesResponse( + space.SearchSpacesResponse( spaces=[ space.Space(), space.Space(), @@ -16264,17 +17072,17 @@ def test_list_spaces_rest_pager(transport: str = "rest"): ], next_page_token="abc", ), - space.ListSpacesResponse( + space.SearchSpacesResponse( spaces=[], next_page_token="def", ), - space.ListSpacesResponse( + space.SearchSpacesResponse( spaces=[ space.Space(), ], next_page_token="ghi", ), - space.ListSpacesResponse( + space.SearchSpacesResponse( spaces=[ space.Space(), space.Space(), @@ -16285,7 +17093,7 @@ def test_list_spaces_rest_pager(transport: str = "rest"): response = response + response # Wrap the values into proper Response objs - response = tuple(space.ListSpacesResponse.to_json(x) for x in response) + response = tuple(space.SearchSpacesResponse.to_json(x) for x in response) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): return_val._content = response_val.encode("UTF-8") @@ -16294,13 +17102,13 @@ def test_list_spaces_rest_pager(transport: str = "rest"): sample_request = {} - pager = client.list_spaces(request=sample_request) + pager = client.search_spaces(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, space.Space) for i in results) - pages = list(client.list_spaces(request=sample_request).pages) + pages = list(client.search_spaces(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -16338,6 +17146,7 @@ def test_get_space_rest(request_type): import_mode=True, admin_installed=True, 
space_uri="space_uri_value", + predefined_permission_settings=space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, ) # Wrap the value into a proper Response obj @@ -16431,6 +17240,8 @@ def test_get_space_rest_required_fields(request_type=space.GetSpaceRequest): unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() ).get_space._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("use_admin_access",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -16484,7 +17295,7 @@ def test_get_space_rest_unset_required_fields(): ) unset_fields = transport.get_space._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + assert set(unset_fields) == (set(("useAdminAccess",)) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -16656,9 +17467,28 @@ def test_create_space_rest(request_type): "space_history_state": 1, "import_mode": True, "create_time": {"seconds": 751, "nanos": 543}, + "last_active_time": {}, "admin_installed": True, + "membership_count": { + "joined_direct_human_user_count": 3185, + "joined_group_count": 1933, + }, "access_settings": {"access_state": 1, "audience": "audience_value"}, "space_uri": "space_uri_value", + "predefined_permission_settings": 1, + "permission_settings": { + "manage_members_and_groups": { + "managers_allowed": True, + "members_allowed": True, + }, + "modify_space_details": {}, + "toggle_history": {}, + "use_at_mention_all": {}, + "manage_apps": {}, + "manage_webhooks": {}, + "post_messages": {}, + "reply_messages": {}, + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -16745,6 +17575,7 @@ def get_message_fields(field): import_mode=True, admin_installed=True, space_uri="space_uri_value", + predefined_permission_settings=gc_space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, ) # Wrap the value into a proper Response obj @@ -17061,6 +17892,7 @@ def test_set_up_space_rest(request_type): import_mode=True, admin_installed=True, space_uri="space_uri_value", + predefined_permission_settings=space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, ) # Wrap the value into a proper Response obj @@ -17320,9 +18152,28 @@ def test_update_space_rest(request_type): "space_history_state": 1, "import_mode": True, "create_time": {"seconds": 751, "nanos": 543}, + "last_active_time": {}, "admin_installed": True, + "membership_count": { + "joined_direct_human_user_count": 3185, + "joined_group_count": 1933, + }, "access_settings": {"access_state": 1, "audience": "audience_value"}, "space_uri": "space_uri_value", + "predefined_permission_settings": 1, + "permission_settings": { + "manage_members_and_groups": { + "managers_allowed": True, + "members_allowed": True, + }, + "modify_space_details": {}, + "toggle_history": {}, + "use_at_mention_all": {}, + "manage_apps": {}, + "manage_webhooks": {}, + "post_messages": {}, + "reply_messages": {}, + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -17409,6 +18260,7 @@ def get_message_fields(field): import_mode=True, admin_installed=True, space_uri="space_uri_value", + predefined_permission_settings=gc_space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, ) # Wrap the value into a proper Response obj @@ -17500,7 +18352,12 @@ def test_update_space_rest_required_fields(request_type=gc_space.UpdateSpaceRequ credentials=ga_credentials.AnonymousCredentials() ).update_space._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) + assert not set(unset_fields) - set( + ( + "update_mask", + "use_admin_access", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -17553,7 +18410,15 @@ def test_update_space_rest_unset_required_fields(): ) unset_fields = transport.update_space._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask",)) & set(("space",))) + assert set(unset_fields) == ( + set( + ( + "updateMask", + "useAdminAccess", + ) + ) + & set(("space",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -17792,6 +18657,8 @@ def test_delete_space_rest_required_fields(request_type=space.DeleteSpaceRequest unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() ).delete_space._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("use_admin_access",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -17842,7 +18709,7 @@ def test_delete_space_rest_unset_required_fields(): ) unset_fields = transport.delete_space._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + assert set(unset_fields) == (set(("useAdminAccess",)) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -18257,6 +19124,7 @@ def test_find_direct_message_rest(request_type): import_mode=True, admin_installed=True, space_uri="space_uri_value", + predefined_permission_settings=space.Space.PredefinedPermissionSettings.COLLABORATION_SPACE, ) # Wrap the value into a proper Response obj @@ -18697,6 +19565,8 @@ def test_create_membership_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() ).create_membership._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("use_admin_access",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -18752,7 +19622,7 @@ def test_create_membership_rest_unset_required_fields(): unset_fields = transport.create_membership._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) + set(("useAdminAccess",)) & set( ( "parent", @@ -19095,7 +19965,12 @@ def test_update_membership_rest_required_fields( credentials=ga_credentials.AnonymousCredentials() ).update_membership._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("update_mask",)) + assert not set(unset_fields) - set( + ( + "update_mask", + "use_admin_access", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -19149,7 +20024,12 @@ def test_update_membership_rest_unset_required_fields(): unset_fields = transport.update_membership._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("updateMask",)) + set( + ( + "updateMask", + "useAdminAccess", + ) + ) & set( ( "membership", @@ -19413,6 +20293,8 @@ def test_delete_membership_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() ).delete_membership._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("use_admin_access",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -19466,7 +20348,7 @@ def test_delete_membership_rest_unset_required_fields(): ) unset_fields = transport.delete_membership._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + assert set(unset_fields) == (set(("useAdminAccess",)) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -22519,6 +23401,7 @@ def test_chat_service_base_transport(): "get_attachment", "upload_attachment", "list_spaces", + "search_spaces", "get_space", "create_space", "set_up_space", @@ -22912,6 +23795,9 @@ def test_chat_service_client_transport_session_collision(transport_name): session1 = client1.transport.list_spaces._session session2 = client2.transport.list_spaces._session assert session1 != session2 + session1 = client1.transport.search_spaces._session + session2 = client2.transport.search_spaces._session + assert session1 != session2 session1 = client1.transport.get_space._session session2 = client2.transport.get_space._session assert session1 != 
session2 diff --git a/packages/google-cloud-apihub/CHANGELOG.md b/packages/google-cloud-apihub/CHANGELOG.md index ae96d0e193c5..6b8fb34aaf0b 100644 --- a/packages/google-cloud-apihub/CHANGELOG.md +++ b/packages/google-cloud-apihub/CHANGELOG.md @@ -1,5 +1,16 @@ # Changelog +## [0.2.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-apihub-v0.1.0...google-cloud-apihub-v0.2.0) (2024-09-04) + + +### ⚠ BREAKING CHANGES + +* [google-cloud-apihub] remove gRPC support for client libraries ([#13055](https://github.com/googleapis/google-cloud-python/issues/13055)) + +### Bug Fixes + +* [google-cloud-apihub] remove gRPC support for client libraries ([#13055](https://github.com/googleapis/google-cloud-python/issues/13055)) ([3762ff4](https://github.com/googleapis/google-cloud-python/commit/3762ff40e51466bc516939a31732300c8e20211a)) + ## 0.1.0 (2024-08-08) diff --git a/packages/google-cloud-apihub/google/cloud/apihub/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub/__init__.py index 62b04504759e..72b5c1f8fbe5 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub/__init__.py @@ -18,35 +18,16 @@ __version__ = package_version.__version__ -from google.cloud.apihub_v1.services.api_hub.async_client import ApiHubAsyncClient from google.cloud.apihub_v1.services.api_hub.client import ApiHubClient -from google.cloud.apihub_v1.services.api_hub_dependencies.async_client import ( - ApiHubDependenciesAsyncClient, -) from google.cloud.apihub_v1.services.api_hub_dependencies.client import ( ApiHubDependenciesClient, ) -from google.cloud.apihub_v1.services.api_hub_plugin.async_client import ( - ApiHubPluginAsyncClient, -) from google.cloud.apihub_v1.services.api_hub_plugin.client import ApiHubPluginClient -from google.cloud.apihub_v1.services.host_project_registration_service.async_client import ( - HostProjectRegistrationServiceAsyncClient, -) from 
google.cloud.apihub_v1.services.host_project_registration_service.client import ( HostProjectRegistrationServiceClient, ) -from google.cloud.apihub_v1.services.linting_service.async_client import ( - LintingServiceAsyncClient, -) from google.cloud.apihub_v1.services.linting_service.client import LintingServiceClient -from google.cloud.apihub_v1.services.provisioning.async_client import ( - ProvisioningAsyncClient, -) from google.cloud.apihub_v1.services.provisioning.client import ProvisioningClient -from google.cloud.apihub_v1.services.runtime_project_attachment_service.async_client import ( - RuntimeProjectAttachmentServiceAsyncClient, -) from google.cloud.apihub_v1.services.runtime_project_attachment_service.client import ( RuntimeProjectAttachmentServiceClient, ) @@ -175,19 +156,12 @@ __all__ = ( "ApiHubClient", - "ApiHubAsyncClient", "ApiHubDependenciesClient", - "ApiHubDependenciesAsyncClient", "ApiHubPluginClient", - "ApiHubPluginAsyncClient", "HostProjectRegistrationServiceClient", - "HostProjectRegistrationServiceAsyncClient", "LintingServiceClient", - "LintingServiceAsyncClient", "ProvisioningClient", - "ProvisioningAsyncClient", "RuntimeProjectAttachmentServiceClient", - "RuntimeProjectAttachmentServiceAsyncClient", "ApiHubResource", "CreateApiRequest", "CreateAttributeRequest", diff --git a/packages/google-cloud-apihub/google/cloud/apihub/gapic_version.py b/packages/google-cloud-apihub/google/cloud/apihub/gapic_version.py index 558c8aab67c5..364164ddb134 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub/gapic_version.py +++ b/packages/google-cloud-apihub/google/cloud/apihub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.2.0" # {x-release-please-version} diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/__init__.py index 27a89617ee68..ddde89662be7 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/__init__.py @@ -18,20 +18,15 @@ __version__ = package_version.__version__ -from .services.api_hub import ApiHubAsyncClient, ApiHubClient -from .services.api_hub_dependencies import ( - ApiHubDependenciesAsyncClient, - ApiHubDependenciesClient, -) -from .services.api_hub_plugin import ApiHubPluginAsyncClient, ApiHubPluginClient +from .services.api_hub import ApiHubClient +from .services.api_hub_dependencies import ApiHubDependenciesClient +from .services.api_hub_plugin import ApiHubPluginClient from .services.host_project_registration_service import ( - HostProjectRegistrationServiceAsyncClient, HostProjectRegistrationServiceClient, ) -from .services.linting_service import LintingServiceAsyncClient, LintingServiceClient -from .services.provisioning import ProvisioningAsyncClient, ProvisioningClient +from .services.linting_service import LintingServiceClient +from .services.provisioning import ProvisioningClient from .services.runtime_project_attachment_service import ( - RuntimeProjectAttachmentServiceAsyncClient, RuntimeProjectAttachmentServiceClient, ) from .types.apihub_service import ( @@ -158,13 +153,6 @@ ) __all__ = ( - "ApiHubAsyncClient", - "ApiHubDependenciesAsyncClient", - "ApiHubPluginAsyncClient", - "HostProjectRegistrationServiceAsyncClient", - "LintingServiceAsyncClient", - "ProvisioningAsyncClient", - "RuntimeProjectAttachmentServiceAsyncClient", "Api", "ApiHubClient", "ApiHubDependenciesClient", diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/gapic_metadata.json 
b/packages/google-cloud-apihub/google/cloud/apihub_v1/gapic_metadata.json index 079807dd2715..1585fa5ee448 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/gapic_metadata.json +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/gapic_metadata.json @@ -7,7 +7,7 @@ "services": { "ApiHub": { "clients": { - "grpc": { + "rest": { "libraryClient": "ApiHubClient", "rpcs": { "CreateApi": { @@ -186,728 +186,138 @@ ] } } - }, - "grpc-async": { - "libraryClient": "ApiHubAsyncClient", + } + } + }, + "ApiHubDependencies": { + "clients": { + "rest": { + "libraryClient": "ApiHubDependenciesClient", "rpcs": { - "CreateApi": { - "methods": [ - "create_api" - ] - }, - "CreateAttribute": { - "methods": [ - "create_attribute" - ] - }, - "CreateDeployment": { - "methods": [ - "create_deployment" - ] - }, - "CreateExternalApi": { - "methods": [ - "create_external_api" - ] - }, - "CreateSpec": { - "methods": [ - "create_spec" - ] - }, - "CreateVersion": { - "methods": [ - "create_version" - ] - }, - "DeleteApi": { - "methods": [ - "delete_api" - ] - }, - "DeleteAttribute": { - "methods": [ - "delete_attribute" - ] - }, - "DeleteDeployment": { - "methods": [ - "delete_deployment" - ] - }, - "DeleteExternalApi": { - "methods": [ - "delete_external_api" - ] - }, - "DeleteSpec": { - "methods": [ - "delete_spec" - ] - }, - "DeleteVersion": { - "methods": [ - "delete_version" - ] - }, - "GetApi": { - "methods": [ - "get_api" - ] - }, - "GetApiOperation": { - "methods": [ - "get_api_operation" - ] - }, - "GetAttribute": { - "methods": [ - "get_attribute" - ] - }, - "GetDefinition": { - "methods": [ - "get_definition" - ] - }, - "GetDeployment": { - "methods": [ - "get_deployment" - ] - }, - "GetExternalApi": { - "methods": [ - "get_external_api" - ] - }, - "GetSpec": { - "methods": [ - "get_spec" - ] - }, - "GetSpecContents": { - "methods": [ - "get_spec_contents" - ] - }, - "GetVersion": { - "methods": [ - "get_version" - ] - }, - "ListApiOperations": { - "methods": [ - 
"list_api_operations" - ] - }, - "ListApis": { - "methods": [ - "list_apis" - ] - }, - "ListAttributes": { - "methods": [ - "list_attributes" - ] - }, - "ListDeployments": { - "methods": [ - "list_deployments" - ] - }, - "ListExternalApis": { - "methods": [ - "list_external_apis" - ] - }, - "ListSpecs": { - "methods": [ - "list_specs" - ] - }, - "ListVersions": { + "CreateDependency": { "methods": [ - "list_versions" + "create_dependency" ] }, - "SearchResources": { + "DeleteDependency": { "methods": [ - "search_resources" + "delete_dependency" ] }, - "UpdateApi": { + "GetDependency": { "methods": [ - "update_api" + "get_dependency" ] }, - "UpdateAttribute": { + "ListDependencies": { "methods": [ - "update_attribute" + "list_dependencies" ] }, - "UpdateDeployment": { + "UpdateDependency": { "methods": [ - "update_deployment" + "update_dependency" ] - }, - "UpdateExternalApi": { + } + } + } + } + }, + "ApiHubPlugin": { + "clients": { + "rest": { + "libraryClient": "ApiHubPluginClient", + "rpcs": { + "DisablePlugin": { "methods": [ - "update_external_api" + "disable_plugin" ] }, - "UpdateSpec": { + "EnablePlugin": { "methods": [ - "update_spec" + "enable_plugin" ] }, - "UpdateVersion": { + "GetPlugin": { "methods": [ - "update_version" + "get_plugin" ] } } - }, + } + } + }, + "HostProjectRegistrationService": { + "clients": { "rest": { - "libraryClient": "ApiHubClient", + "libraryClient": "HostProjectRegistrationServiceClient", "rpcs": { - "CreateApi": { + "CreateHostProjectRegistration": { "methods": [ - "create_api" + "create_host_project_registration" ] }, - "CreateAttribute": { + "GetHostProjectRegistration": { "methods": [ - "create_attribute" + "get_host_project_registration" ] }, - "CreateDeployment": { + "ListHostProjectRegistrations": { "methods": [ - "create_deployment" + "list_host_project_registrations" ] - }, - "CreateExternalApi": { + } + } + } + } + }, + "LintingService": { + "clients": { + "rest": { + "libraryClient": "LintingServiceClient", + "rpcs": 
{ + "GetStyleGuide": { "methods": [ - "create_external_api" + "get_style_guide" ] }, - "CreateSpec": { + "GetStyleGuideContents": { "methods": [ - "create_spec" + "get_style_guide_contents" ] }, - "CreateVersion": { + "LintSpec": { "methods": [ - "create_version" + "lint_spec" ] }, - "DeleteApi": { + "UpdateStyleGuide": { "methods": [ - "delete_api" + "update_style_guide" ] - }, - "DeleteAttribute": { + } + } + } + } + }, + "Provisioning": { + "clients": { + "rest": { + "libraryClient": "ProvisioningClient", + "rpcs": { + "CreateApiHubInstance": { "methods": [ - "delete_attribute" + "create_api_hub_instance" ] }, - "DeleteDeployment": { + "GetApiHubInstance": { "methods": [ - "delete_deployment" + "get_api_hub_instance" ] }, - "DeleteExternalApi": { + "LookupApiHubInstance": { "methods": [ - "delete_external_api" - ] - }, - "DeleteSpec": { - "methods": [ - "delete_spec" - ] - }, - "DeleteVersion": { - "methods": [ - "delete_version" - ] - }, - "GetApi": { - "methods": [ - "get_api" - ] - }, - "GetApiOperation": { - "methods": [ - "get_api_operation" - ] - }, - "GetAttribute": { - "methods": [ - "get_attribute" - ] - }, - "GetDefinition": { - "methods": [ - "get_definition" - ] - }, - "GetDeployment": { - "methods": [ - "get_deployment" - ] - }, - "GetExternalApi": { - "methods": [ - "get_external_api" - ] - }, - "GetSpec": { - "methods": [ - "get_spec" - ] - }, - "GetSpecContents": { - "methods": [ - "get_spec_contents" - ] - }, - "GetVersion": { - "methods": [ - "get_version" - ] - }, - "ListApiOperations": { - "methods": [ - "list_api_operations" - ] - }, - "ListApis": { - "methods": [ - "list_apis" - ] - }, - "ListAttributes": { - "methods": [ - "list_attributes" - ] - }, - "ListDeployments": { - "methods": [ - "list_deployments" - ] - }, - "ListExternalApis": { - "methods": [ - "list_external_apis" - ] - }, - "ListSpecs": { - "methods": [ - "list_specs" - ] - }, - "ListVersions": { - "methods": [ - "list_versions" - ] - }, - "SearchResources": { - "methods": [ 
- "search_resources" - ] - }, - "UpdateApi": { - "methods": [ - "update_api" - ] - }, - "UpdateAttribute": { - "methods": [ - "update_attribute" - ] - }, - "UpdateDeployment": { - "methods": [ - "update_deployment" - ] - }, - "UpdateExternalApi": { - "methods": [ - "update_external_api" - ] - }, - "UpdateSpec": { - "methods": [ - "update_spec" - ] - }, - "UpdateVersion": { - "methods": [ - "update_version" - ] - } - } - } - } - }, - "ApiHubDependencies": { - "clients": { - "grpc": { - "libraryClient": "ApiHubDependenciesClient", - "rpcs": { - "CreateDependency": { - "methods": [ - "create_dependency" - ] - }, - "DeleteDependency": { - "methods": [ - "delete_dependency" - ] - }, - "GetDependency": { - "methods": [ - "get_dependency" - ] - }, - "ListDependencies": { - "methods": [ - "list_dependencies" - ] - }, - "UpdateDependency": { - "methods": [ - "update_dependency" - ] - } - } - }, - "grpc-async": { - "libraryClient": "ApiHubDependenciesAsyncClient", - "rpcs": { - "CreateDependency": { - "methods": [ - "create_dependency" - ] - }, - "DeleteDependency": { - "methods": [ - "delete_dependency" - ] - }, - "GetDependency": { - "methods": [ - "get_dependency" - ] - }, - "ListDependencies": { - "methods": [ - "list_dependencies" - ] - }, - "UpdateDependency": { - "methods": [ - "update_dependency" - ] - } - } - }, - "rest": { - "libraryClient": "ApiHubDependenciesClient", - "rpcs": { - "CreateDependency": { - "methods": [ - "create_dependency" - ] - }, - "DeleteDependency": { - "methods": [ - "delete_dependency" - ] - }, - "GetDependency": { - "methods": [ - "get_dependency" - ] - }, - "ListDependencies": { - "methods": [ - "list_dependencies" - ] - }, - "UpdateDependency": { - "methods": [ - "update_dependency" - ] - } - } - } - } - }, - "ApiHubPlugin": { - "clients": { - "grpc": { - "libraryClient": "ApiHubPluginClient", - "rpcs": { - "DisablePlugin": { - "methods": [ - "disable_plugin" - ] - }, - "EnablePlugin": { - "methods": [ - "enable_plugin" - ] - }, - 
"GetPlugin": { - "methods": [ - "get_plugin" - ] - } - } - }, - "grpc-async": { - "libraryClient": "ApiHubPluginAsyncClient", - "rpcs": { - "DisablePlugin": { - "methods": [ - "disable_plugin" - ] - }, - "EnablePlugin": { - "methods": [ - "enable_plugin" - ] - }, - "GetPlugin": { - "methods": [ - "get_plugin" - ] - } - } - }, - "rest": { - "libraryClient": "ApiHubPluginClient", - "rpcs": { - "DisablePlugin": { - "methods": [ - "disable_plugin" - ] - }, - "EnablePlugin": { - "methods": [ - "enable_plugin" - ] - }, - "GetPlugin": { - "methods": [ - "get_plugin" - ] - } - } - } - } - }, - "HostProjectRegistrationService": { - "clients": { - "grpc": { - "libraryClient": "HostProjectRegistrationServiceClient", - "rpcs": { - "CreateHostProjectRegistration": { - "methods": [ - "create_host_project_registration" - ] - }, - "GetHostProjectRegistration": { - "methods": [ - "get_host_project_registration" - ] - }, - "ListHostProjectRegistrations": { - "methods": [ - "list_host_project_registrations" - ] - } - } - }, - "grpc-async": { - "libraryClient": "HostProjectRegistrationServiceAsyncClient", - "rpcs": { - "CreateHostProjectRegistration": { - "methods": [ - "create_host_project_registration" - ] - }, - "GetHostProjectRegistration": { - "methods": [ - "get_host_project_registration" - ] - }, - "ListHostProjectRegistrations": { - "methods": [ - "list_host_project_registrations" - ] - } - } - }, - "rest": { - "libraryClient": "HostProjectRegistrationServiceClient", - "rpcs": { - "CreateHostProjectRegistration": { - "methods": [ - "create_host_project_registration" - ] - }, - "GetHostProjectRegistration": { - "methods": [ - "get_host_project_registration" - ] - }, - "ListHostProjectRegistrations": { - "methods": [ - "list_host_project_registrations" - ] - } - } - } - } - }, - "LintingService": { - "clients": { - "grpc": { - "libraryClient": "LintingServiceClient", - "rpcs": { - "GetStyleGuide": { - "methods": [ - "get_style_guide" - ] - }, - "GetStyleGuideContents": { - 
"methods": [ - "get_style_guide_contents" - ] - }, - "LintSpec": { - "methods": [ - "lint_spec" - ] - }, - "UpdateStyleGuide": { - "methods": [ - "update_style_guide" - ] - } - } - }, - "grpc-async": { - "libraryClient": "LintingServiceAsyncClient", - "rpcs": { - "GetStyleGuide": { - "methods": [ - "get_style_guide" - ] - }, - "GetStyleGuideContents": { - "methods": [ - "get_style_guide_contents" - ] - }, - "LintSpec": { - "methods": [ - "lint_spec" - ] - }, - "UpdateStyleGuide": { - "methods": [ - "update_style_guide" - ] - } - } - }, - "rest": { - "libraryClient": "LintingServiceClient", - "rpcs": { - "GetStyleGuide": { - "methods": [ - "get_style_guide" - ] - }, - "GetStyleGuideContents": { - "methods": [ - "get_style_guide_contents" - ] - }, - "LintSpec": { - "methods": [ - "lint_spec" - ] - }, - "UpdateStyleGuide": { - "methods": [ - "update_style_guide" - ] - } - } - } - } - }, - "Provisioning": { - "clients": { - "grpc": { - "libraryClient": "ProvisioningClient", - "rpcs": { - "CreateApiHubInstance": { - "methods": [ - "create_api_hub_instance" - ] - }, - "GetApiHubInstance": { - "methods": [ - "get_api_hub_instance" - ] - }, - "LookupApiHubInstance": { - "methods": [ - "lookup_api_hub_instance" - ] - } - } - }, - "grpc-async": { - "libraryClient": "ProvisioningAsyncClient", - "rpcs": { - "CreateApiHubInstance": { - "methods": [ - "create_api_hub_instance" - ] - }, - "GetApiHubInstance": { - "methods": [ - "get_api_hub_instance" - ] - }, - "LookupApiHubInstance": { - "methods": [ - "lookup_api_hub_instance" - ] - } - } - }, - "rest": { - "libraryClient": "ProvisioningClient", - "rpcs": { - "CreateApiHubInstance": { - "methods": [ - "create_api_hub_instance" - ] - }, - "GetApiHubInstance": { - "methods": [ - "get_api_hub_instance" - ] - }, - "LookupApiHubInstance": { - "methods": [ - "lookup_api_hub_instance" + "lookup_api_hub_instance" ] } } @@ -916,66 +326,6 @@ }, "RuntimeProjectAttachmentService": { "clients": { - "grpc": { - "libraryClient": 
"RuntimeProjectAttachmentServiceClient", - "rpcs": { - "CreateRuntimeProjectAttachment": { - "methods": [ - "create_runtime_project_attachment" - ] - }, - "DeleteRuntimeProjectAttachment": { - "methods": [ - "delete_runtime_project_attachment" - ] - }, - "GetRuntimeProjectAttachment": { - "methods": [ - "get_runtime_project_attachment" - ] - }, - "ListRuntimeProjectAttachments": { - "methods": [ - "list_runtime_project_attachments" - ] - }, - "LookupRuntimeProjectAttachment": { - "methods": [ - "lookup_runtime_project_attachment" - ] - } - } - }, - "grpc-async": { - "libraryClient": "RuntimeProjectAttachmentServiceAsyncClient", - "rpcs": { - "CreateRuntimeProjectAttachment": { - "methods": [ - "create_runtime_project_attachment" - ] - }, - "DeleteRuntimeProjectAttachment": { - "methods": [ - "delete_runtime_project_attachment" - ] - }, - "GetRuntimeProjectAttachment": { - "methods": [ - "get_runtime_project_attachment" - ] - }, - "ListRuntimeProjectAttachments": { - "methods": [ - "list_runtime_project_attachments" - ] - }, - "LookupRuntimeProjectAttachment": { - "methods": [ - "lookup_runtime_project_attachment" - ] - } - } - }, "rest": { "libraryClient": "RuntimeProjectAttachmentServiceClient", "rpcs": { diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/gapic_version.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/gapic_version.py index 558c8aab67c5..364164ddb134 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/gapic_version.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.2.0" # {x-release-please-version} diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/__init__.py index 8da758214ef9..0d50a3548806 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/__init__.py @@ -13,10 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from .async_client import ApiHubAsyncClient from .client import ApiHubClient -__all__ = ( - "ApiHubClient", - "ApiHubAsyncClient", -) +__all__ = ("ApiHubClient",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/client.py index 69f73e4792ec..77ddc5472962 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/client.py @@ -57,8 +57,6 @@ from google.cloud.apihub_v1.types import apihub_service, common_fields from .transports.base import DEFAULT_CLIENT_INFO, ApiHubTransport -from .transports.grpc import ApiHubGrpcTransport -from .transports.grpc_asyncio import ApiHubGrpcAsyncIOTransport from .transports.rest import ApiHubRestTransport @@ -71,8 +69,6 @@ class ApiHubClientMeta(type): """ _transport_registry = OrderedDict() # type: Dict[str, Type[ApiHubTransport]] - _transport_registry["grpc"] = ApiHubGrpcTransport - _transport_registry["grpc_asyncio"] = ApiHubGrpcAsyncIOTransport _transport_registry["rest"] = ApiHubRestTransport def get_transport_class( diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/pagers.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/pagers.py index 
e5b37bd42192..510192a2f321 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/pagers.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/pagers.py @@ -115,84 +115,6 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) -class ListApisAsyncPager: - """A pager for iterating through ``list_apis`` requests. - - This class thinly wraps an initial - :class:`google.cloud.apihub_v1.types.ListApisResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``apis`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListApis`` requests and continue to iterate - through the ``apis`` field on the - corresponding responses. - - All the usual :class:`google.cloud.apihub_v1.types.ListApisResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - - def __init__( - self, - method: Callable[..., Awaitable[apihub_service.ListApisResponse]], - request: apihub_service.ListApisRequest, - response: apihub_service.ListApisResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.apihub_v1.types.ListApisRequest): - The initial request object. - response (google.cloud.apihub_v1.types.ListApisResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = apihub_service.ListApisRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[apihub_service.ListApisResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __aiter__(self) -> AsyncIterator[common_fields.Api]: - async def async_generator(): - async for page in self.pages: - for response in page.apis: - yield response - - return async_generator() - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) - - class ListVersionsPager: """A pager for iterating through ``list_versions`` requests. @@ -267,84 +189,6 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) -class ListVersionsAsyncPager: - """A pager for iterating through ``list_versions`` requests. - - This class thinly wraps an initial - :class:`google.cloud.apihub_v1.types.ListVersionsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``versions`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListVersions`` requests and continue to iterate - through the ``versions`` field on the - corresponding responses. - - All the usual :class:`google.cloud.apihub_v1.types.ListVersionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - - def __init__( - self, - method: Callable[..., Awaitable[apihub_service.ListVersionsResponse]], - request: apihub_service.ListVersionsRequest, - response: apihub_service.ListVersionsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.apihub_v1.types.ListVersionsRequest): - The initial request object. - response (google.cloud.apihub_v1.types.ListVersionsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = apihub_service.ListVersionsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[apihub_service.ListVersionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __aiter__(self) -> AsyncIterator[common_fields.Version]: - async def async_generator(): - async for page in self.pages: - for response in page.versions: - yield response - - return async_generator() - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) - - class ListSpecsPager: """A pager for iterating through ``list_specs`` requests. 
@@ -419,84 +263,6 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) -class ListSpecsAsyncPager: - """A pager for iterating through ``list_specs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.apihub_v1.types.ListSpecsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``specs`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListSpecs`` requests and continue to iterate - through the ``specs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.apihub_v1.types.ListSpecsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - - def __init__( - self, - method: Callable[..., Awaitable[apihub_service.ListSpecsResponse]], - request: apihub_service.ListSpecsRequest, - response: apihub_service.ListSpecsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.apihub_v1.types.ListSpecsRequest): - The initial request object. - response (google.cloud.apihub_v1.types.ListSpecsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = apihub_service.ListSpecsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[apihub_service.ListSpecsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __aiter__(self) -> AsyncIterator[common_fields.Spec]: - async def async_generator(): - async for page in self.pages: - for response in page.specs: - yield response - - return async_generator() - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) - - class ListApiOperationsPager: """A pager for iterating through ``list_api_operations`` requests. @@ -571,84 +337,6 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) -class ListApiOperationsAsyncPager: - """A pager for iterating through ``list_api_operations`` requests. - - This class thinly wraps an initial - :class:`google.cloud.apihub_v1.types.ListApiOperationsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``api_operations`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListApiOperations`` requests and continue to iterate - through the ``api_operations`` field on the - corresponding responses. - - All the usual :class:`google.cloud.apihub_v1.types.ListApiOperationsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - - def __init__( - self, - method: Callable[..., Awaitable[apihub_service.ListApiOperationsResponse]], - request: apihub_service.ListApiOperationsRequest, - response: apihub_service.ListApiOperationsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.apihub_v1.types.ListApiOperationsRequest): - The initial request object. - response (google.cloud.apihub_v1.types.ListApiOperationsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = apihub_service.ListApiOperationsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[apihub_service.ListApiOperationsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __aiter__(self) -> AsyncIterator[common_fields.ApiOperation]: - async def async_generator(): - async for page in self.pages: - for response in page.api_operations: - yield response - - return async_generator() - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) - - class ListDeploymentsPager: """A pager 
for iterating through ``list_deployments`` requests. @@ -723,84 +411,6 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) -class ListDeploymentsAsyncPager: - """A pager for iterating through ``list_deployments`` requests. - - This class thinly wraps an initial - :class:`google.cloud.apihub_v1.types.ListDeploymentsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``deployments`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDeployments`` requests and continue to iterate - through the ``deployments`` field on the - corresponding responses. - - All the usual :class:`google.cloud.apihub_v1.types.ListDeploymentsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - - def __init__( - self, - method: Callable[..., Awaitable[apihub_service.ListDeploymentsResponse]], - request: apihub_service.ListDeploymentsRequest, - response: apihub_service.ListDeploymentsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.apihub_v1.types.ListDeploymentsRequest): - The initial request object. - response (google.cloud.apihub_v1.types.ListDeploymentsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = apihub_service.ListDeploymentsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[apihub_service.ListDeploymentsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __aiter__(self) -> AsyncIterator[common_fields.Deployment]: - async def async_generator(): - async for page in self.pages: - for response in page.deployments: - yield response - - return async_generator() - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) - - class ListAttributesPager: """A pager for iterating through ``list_attributes`` requests. @@ -875,84 +485,6 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) -class ListAttributesAsyncPager: - """A pager for iterating through ``list_attributes`` requests. - - This class thinly wraps an initial - :class:`google.cloud.apihub_v1.types.ListAttributesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``attributes`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListAttributes`` requests and continue to iterate - through the ``attributes`` field on the - corresponding responses. - - All the usual :class:`google.cloud.apihub_v1.types.ListAttributesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - - def __init__( - self, - method: Callable[..., Awaitable[apihub_service.ListAttributesResponse]], - request: apihub_service.ListAttributesRequest, - response: apihub_service.ListAttributesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.apihub_v1.types.ListAttributesRequest): - The initial request object. - response (google.cloud.apihub_v1.types.ListAttributesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = apihub_service.ListAttributesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[apihub_service.ListAttributesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __aiter__(self) -> AsyncIterator[common_fields.Attribute]: - async def async_generator(): - async for page in self.pages: - for response in page.attributes: - yield response - - return async_generator() - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) - - class SearchResourcesPager: """A pager for iterating through 
``search_resources`` requests. @@ -1027,84 +559,6 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) -class SearchResourcesAsyncPager: - """A pager for iterating through ``search_resources`` requests. - - This class thinly wraps an initial - :class:`google.cloud.apihub_v1.types.SearchResourcesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``search_results`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``SearchResources`` requests and continue to iterate - through the ``search_results`` field on the - corresponding responses. - - All the usual :class:`google.cloud.apihub_v1.types.SearchResourcesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - - def __init__( - self, - method: Callable[..., Awaitable[apihub_service.SearchResourcesResponse]], - request: apihub_service.SearchResourcesRequest, - response: apihub_service.SearchResourcesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.apihub_v1.types.SearchResourcesRequest): - The initial request object. - response (google.cloud.apihub_v1.types.SearchResourcesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = apihub_service.SearchResourcesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[apihub_service.SearchResourcesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __aiter__(self) -> AsyncIterator[apihub_service.SearchResult]: - async def async_generator(): - async for page in self.pages: - for response in page.search_results: - yield response - - return async_generator() - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) - - class ListExternalApisPager: """A pager for iterating through ``list_external_apis`` requests. @@ -1177,81 +631,3 @@ def __iter__(self) -> Iterator[common_fields.ExternalApi]: def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) - - -class ListExternalApisAsyncPager: - """A pager for iterating through ``list_external_apis`` requests. - - This class thinly wraps an initial - :class:`google.cloud.apihub_v1.types.ListExternalApisResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``external_apis`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListExternalApis`` requests and continue to iterate - through the ``external_apis`` field on the - corresponding responses. - - All the usual :class:`google.cloud.apihub_v1.types.ListExternalApisResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - - def __init__( - self, - method: Callable[..., Awaitable[apihub_service.ListExternalApisResponse]], - request: apihub_service.ListExternalApisRequest, - response: apihub_service.ListExternalApisResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.apihub_v1.types.ListExternalApisRequest): - The initial request object. - response (google.cloud.apihub_v1.types.ListExternalApisResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = apihub_service.ListExternalApisRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[apihub_service.ListExternalApisResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __aiter__(self) -> AsyncIterator[common_fields.ExternalApi]: - async def async_generator(): - async for page in self.pages: - for response in page.external_apis: - yield response - - return async_generator() - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git 
a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/transports/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/transports/__init__.py index ae6fa9e02afe..904125024a7b 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/transports/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/transports/__init__.py @@ -17,20 +17,14 @@ from typing import Dict, Type from .base import ApiHubTransport -from .grpc import ApiHubGrpcTransport -from .grpc_asyncio import ApiHubGrpcAsyncIOTransport from .rest import ApiHubRestInterceptor, ApiHubRestTransport # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[ApiHubTransport]] -_transport_registry["grpc"] = ApiHubGrpcTransport -_transport_registry["grpc_asyncio"] = ApiHubGrpcAsyncIOTransport _transport_registry["rest"] = ApiHubRestTransport __all__ = ( "ApiHubTransport", - "ApiHubGrpcTransport", - "ApiHubGrpcAsyncIOTransport", "ApiHubRestTransport", "ApiHubRestInterceptor", ) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/__init__.py index 9727d7d5b0d7..146b28fe4729 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/__init__.py @@ -13,10 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from .async_client import ApiHubDependenciesAsyncClient from .client import ApiHubDependenciesClient -__all__ = ( - "ApiHubDependenciesClient", - "ApiHubDependenciesAsyncClient", -) +__all__ = ("ApiHubDependenciesClient",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/client.py index 1c70a2416c8e..70a952fe282c 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/client.py @@ -57,8 +57,6 @@ from google.cloud.apihub_v1.types import apihub_service, common_fields from .transports.base import DEFAULT_CLIENT_INFO, ApiHubDependenciesTransport -from .transports.grpc import ApiHubDependenciesGrpcTransport -from .transports.grpc_asyncio import ApiHubDependenciesGrpcAsyncIOTransport from .transports.rest import ApiHubDependenciesRestTransport @@ -73,8 +71,6 @@ class ApiHubDependenciesClientMeta(type): _transport_registry = ( OrderedDict() ) # type: Dict[str, Type[ApiHubDependenciesTransport]] - _transport_registry["grpc"] = ApiHubDependenciesGrpcTransport - _transport_registry["grpc_asyncio"] = ApiHubDependenciesGrpcAsyncIOTransport _transport_registry["rest"] = ApiHubDependenciesRestTransport def get_transport_class( diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/pagers.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/pagers.py index 89cdfff15348..6f0fa634f84d 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/pagers.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/pagers.py @@ -113,81 +113,3 @@ def __iter__(self) -> Iterator[common_fields.Dependency]: def __repr__(self) -> str: return 
"{0}<{1!r}>".format(self.__class__.__name__, self._response) - - -class ListDependenciesAsyncPager: - """A pager for iterating through ``list_dependencies`` requests. - - This class thinly wraps an initial - :class:`google.cloud.apihub_v1.types.ListDependenciesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``dependencies`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDependencies`` requests and continue to iterate - through the ``dependencies`` field on the - corresponding responses. - - All the usual :class:`google.cloud.apihub_v1.types.ListDependenciesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - - def __init__( - self, - method: Callable[..., Awaitable[apihub_service.ListDependenciesResponse]], - request: apihub_service.ListDependenciesRequest, - response: apihub_service.ListDependenciesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.apihub_v1.types.ListDependenciesRequest): - The initial request object. - response (google.cloud.apihub_v1.types.ListDependenciesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = apihub_service.ListDependenciesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[apihub_service.ListDependenciesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __aiter__(self) -> AsyncIterator[common_fields.Dependency]: - async def async_generator(): - async for page in self.pages: - for response in page.dependencies: - yield response - - return async_generator() - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/transports/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/transports/__init__.py index 8327d8408a2e..5de2b44a3808 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/transports/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/transports/__init__.py @@ -17,22 +17,16 @@ from typing import Dict, Type from .base import ApiHubDependenciesTransport -from .grpc import ApiHubDependenciesGrpcTransport -from .grpc_asyncio import ApiHubDependenciesGrpcAsyncIOTransport from .rest import ApiHubDependenciesRestInterceptor, ApiHubDependenciesRestTransport # Compile a registry of transports. 
_transport_registry = ( OrderedDict() ) # type: Dict[str, Type[ApiHubDependenciesTransport]] -_transport_registry["grpc"] = ApiHubDependenciesGrpcTransport -_transport_registry["grpc_asyncio"] = ApiHubDependenciesGrpcAsyncIOTransport _transport_registry["rest"] = ApiHubDependenciesRestTransport __all__ = ( "ApiHubDependenciesTransport", - "ApiHubDependenciesGrpcTransport", - "ApiHubDependenciesGrpcAsyncIOTransport", "ApiHubDependenciesRestTransport", "ApiHubDependenciesRestInterceptor", ) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/__init__.py index 3d883e94c9c6..5cbea89992b0 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/__init__.py @@ -13,10 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from .async_client import ApiHubPluginAsyncClient from .client import ApiHubPluginClient -__all__ = ( - "ApiHubPluginClient", - "ApiHubPluginAsyncClient", -) +__all__ = ("ApiHubPluginClient",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/client.py index 5f6283c74876..dbfedb9a41d0 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/client.py @@ -54,8 +54,6 @@ from google.cloud.apihub_v1.types import common_fields, plugin_service from .transports.base import DEFAULT_CLIENT_INFO, ApiHubPluginTransport -from .transports.grpc import ApiHubPluginGrpcTransport -from .transports.grpc_asyncio import ApiHubPluginGrpcAsyncIOTransport from .transports.rest import ApiHubPluginRestTransport @@ -68,8 +66,6 @@ class ApiHubPluginClientMeta(type): """ _transport_registry = OrderedDict() # type: Dict[str, Type[ApiHubPluginTransport]] - _transport_registry["grpc"] = ApiHubPluginGrpcTransport - _transport_registry["grpc_asyncio"] = ApiHubPluginGrpcAsyncIOTransport _transport_registry["rest"] = ApiHubPluginRestTransport def get_transport_class( diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/transports/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/transports/__init__.py index 33a3043c2375..9ecb3eaee613 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/transports/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/transports/__init__.py @@ -17,20 +17,14 @@ from typing import Dict, Type from .base import ApiHubPluginTransport -from .grpc import ApiHubPluginGrpcTransport -from .grpc_asyncio import ApiHubPluginGrpcAsyncIOTransport from .rest import 
ApiHubPluginRestInterceptor, ApiHubPluginRestTransport # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[ApiHubPluginTransport]] -_transport_registry["grpc"] = ApiHubPluginGrpcTransport -_transport_registry["grpc_asyncio"] = ApiHubPluginGrpcAsyncIOTransport _transport_registry["rest"] = ApiHubPluginRestTransport __all__ = ( "ApiHubPluginTransport", - "ApiHubPluginGrpcTransport", - "ApiHubPluginGrpcAsyncIOTransport", "ApiHubPluginRestTransport", "ApiHubPluginRestInterceptor", ) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/__init__.py index f08d02f0c7b6..f5f90e47cdb9 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/__init__.py @@ -13,10 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from .async_client import HostProjectRegistrationServiceAsyncClient from .client import HostProjectRegistrationServiceClient -__all__ = ( - "HostProjectRegistrationServiceClient", - "HostProjectRegistrationServiceAsyncClient", -) +__all__ = ("HostProjectRegistrationServiceClient",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/client.py index 89784ae2dd03..2e5897cdad20 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/client.py @@ -59,8 +59,6 @@ DEFAULT_CLIENT_INFO, HostProjectRegistrationServiceTransport, ) -from .transports.grpc import HostProjectRegistrationServiceGrpcTransport -from .transports.grpc_asyncio import HostProjectRegistrationServiceGrpcAsyncIOTransport from .transports.rest import HostProjectRegistrationServiceRestTransport @@ -75,10 +73,6 @@ class HostProjectRegistrationServiceClientMeta(type): _transport_registry = ( OrderedDict() ) # type: Dict[str, Type[HostProjectRegistrationServiceTransport]] - _transport_registry["grpc"] = HostProjectRegistrationServiceGrpcTransport - _transport_registry[ - "grpc_asyncio" - ] = HostProjectRegistrationServiceGrpcAsyncIOTransport _transport_registry["rest"] = HostProjectRegistrationServiceRestTransport def get_transport_class( diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/pagers.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/pagers.py index 2248115ec790..4bb7e2ec7541 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/pagers.py +++ 
b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/pagers.py @@ -125,96 +125,3 @@ def __iter__( def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) - - -class ListHostProjectRegistrationsAsyncPager: - """A pager for iterating through ``list_host_project_registrations`` requests. - - This class thinly wraps an initial - :class:`google.cloud.apihub_v1.types.ListHostProjectRegistrationsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``host_project_registrations`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListHostProjectRegistrations`` requests and continue to iterate - through the ``host_project_registrations`` field on the - corresponding responses. - - All the usual :class:`google.cloud.apihub_v1.types.ListHostProjectRegistrationsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - - def __init__( - self, - method: Callable[ - ..., - Awaitable[ - host_project_registration_service.ListHostProjectRegistrationsResponse - ], - ], - request: host_project_registration_service.ListHostProjectRegistrationsRequest, - response: host_project_registration_service.ListHostProjectRegistrationsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.apihub_v1.types.ListHostProjectRegistrationsRequest): - The initial request object. - response (google.cloud.apihub_v1.types.ListHostProjectRegistrationsResponse): - The initial response object. 
- retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = ( - host_project_registration_service.ListHostProjectRegistrationsRequest( - request - ) - ) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages( - self, - ) -> AsyncIterator[ - host_project_registration_service.ListHostProjectRegistrationsResponse - ]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __aiter__( - self, - ) -> AsyncIterator[host_project_registration_service.HostProjectRegistration]: - async def async_generator(): - async for page in self.pages: - for response in page.host_project_registrations: - yield response - - return async_generator() - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/transports/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/transports/__init__.py index 2352c478fef0..c80657406ff6 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/transports/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/transports/__init__.py @@ -17,8 +17,6 @@ from typing import Dict, Type from .base import 
HostProjectRegistrationServiceTransport -from .grpc import HostProjectRegistrationServiceGrpcTransport -from .grpc_asyncio import HostProjectRegistrationServiceGrpcAsyncIOTransport from .rest import ( HostProjectRegistrationServiceRestInterceptor, HostProjectRegistrationServiceRestTransport, @@ -28,14 +26,10 @@ _transport_registry = ( OrderedDict() ) # type: Dict[str, Type[HostProjectRegistrationServiceTransport]] -_transport_registry["grpc"] = HostProjectRegistrationServiceGrpcTransport -_transport_registry["grpc_asyncio"] = HostProjectRegistrationServiceGrpcAsyncIOTransport _transport_registry["rest"] = HostProjectRegistrationServiceRestTransport __all__ = ( "HostProjectRegistrationServiceTransport", - "HostProjectRegistrationServiceGrpcTransport", - "HostProjectRegistrationServiceGrpcAsyncIOTransport", "HostProjectRegistrationServiceRestTransport", "HostProjectRegistrationServiceRestInterceptor", ) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/__init__.py index 70436549c9b7..68f5fe54993b 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/__init__.py @@ -13,10 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from .async_client import LintingServiceAsyncClient from .client import LintingServiceClient -__all__ = ( - "LintingServiceClient", - "LintingServiceAsyncClient", -) +__all__ = ("LintingServiceClient",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/client.py index 75930326f9d9..608153448c3b 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/client.py @@ -55,8 +55,6 @@ from google.cloud.apihub_v1.types import common_fields, linting_service from .transports.base import DEFAULT_CLIENT_INFO, LintingServiceTransport -from .transports.grpc import LintingServiceGrpcTransport -from .transports.grpc_asyncio import LintingServiceGrpcAsyncIOTransport from .transports.rest import LintingServiceRestTransport @@ -71,8 +69,6 @@ class LintingServiceClientMeta(type): _transport_registry = ( OrderedDict() ) # type: Dict[str, Type[LintingServiceTransport]] - _transport_registry["grpc"] = LintingServiceGrpcTransport - _transport_registry["grpc_asyncio"] = LintingServiceGrpcAsyncIOTransport _transport_registry["rest"] = LintingServiceRestTransport def get_transport_class( diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/transports/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/transports/__init__.py index d9f3131d4481..f8d2f54aac8c 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/transports/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/transports/__init__.py @@ -17,20 +17,14 @@ from typing import Dict, Type from .base import LintingServiceTransport -from .grpc import LintingServiceGrpcTransport -from .grpc_asyncio import 
LintingServiceGrpcAsyncIOTransport from .rest import LintingServiceRestInterceptor, LintingServiceRestTransport # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[LintingServiceTransport]] -_transport_registry["grpc"] = LintingServiceGrpcTransport -_transport_registry["grpc_asyncio"] = LintingServiceGrpcAsyncIOTransport _transport_registry["rest"] = LintingServiceRestTransport __all__ = ( "LintingServiceTransport", - "LintingServiceGrpcTransport", - "LintingServiceGrpcAsyncIOTransport", "LintingServiceRestTransport", "LintingServiceRestInterceptor", ) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/__init__.py index 894b56bb9b82..3df245148ed6 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/__init__.py @@ -13,10 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from .async_client import ProvisioningAsyncClient from .client import ProvisioningClient -__all__ = ( - "ProvisioningClient", - "ProvisioningAsyncClient", -) +__all__ = ("ProvisioningClient",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/client.py index c39738ccb878..56a83e91bd00 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/client.py @@ -57,8 +57,6 @@ from google.cloud.apihub_v1.types import common_fields, provisioning_service from .transports.base import DEFAULT_CLIENT_INFO, ProvisioningTransport -from .transports.grpc import ProvisioningGrpcTransport -from .transports.grpc_asyncio import ProvisioningGrpcAsyncIOTransport from .transports.rest import ProvisioningRestTransport @@ -71,8 +69,6 @@ class ProvisioningClientMeta(type): """ _transport_registry = OrderedDict() # type: Dict[str, Type[ProvisioningTransport]] - _transport_registry["grpc"] = ProvisioningGrpcTransport - _transport_registry["grpc_asyncio"] = ProvisioningGrpcAsyncIOTransport _transport_registry["rest"] = ProvisioningRestTransport def get_transport_class( diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/transports/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/transports/__init__.py index a1a997220440..c82beafe4a3e 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/transports/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/transports/__init__.py @@ -17,20 +17,14 @@ from typing import Dict, Type from .base import ProvisioningTransport -from .grpc import ProvisioningGrpcTransport -from .grpc_asyncio import ProvisioningGrpcAsyncIOTransport from .rest import ProvisioningRestInterceptor, 
ProvisioningRestTransport # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[ProvisioningTransport]] -_transport_registry["grpc"] = ProvisioningGrpcTransport -_transport_registry["grpc_asyncio"] = ProvisioningGrpcAsyncIOTransport _transport_registry["rest"] = ProvisioningRestTransport __all__ = ( "ProvisioningTransport", - "ProvisioningGrpcTransport", - "ProvisioningGrpcAsyncIOTransport", "ProvisioningRestTransport", "ProvisioningRestInterceptor", ) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/__init__.py index 53b21be76ec7..28875a7e7af2 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/__init__.py @@ -13,10 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from .async_client import RuntimeProjectAttachmentServiceAsyncClient from .client import RuntimeProjectAttachmentServiceClient -__all__ = ( - "RuntimeProjectAttachmentServiceClient", - "RuntimeProjectAttachmentServiceAsyncClient", -) +__all__ = ("RuntimeProjectAttachmentServiceClient",) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/client.py index 121ce0bf5470..dc9c1039381a 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/client.py @@ -59,8 +59,6 @@ DEFAULT_CLIENT_INFO, RuntimeProjectAttachmentServiceTransport, ) -from .transports.grpc import RuntimeProjectAttachmentServiceGrpcTransport -from .transports.grpc_asyncio import RuntimeProjectAttachmentServiceGrpcAsyncIOTransport from .transports.rest import RuntimeProjectAttachmentServiceRestTransport @@ -75,10 +73,6 @@ class RuntimeProjectAttachmentServiceClientMeta(type): _transport_registry = ( OrderedDict() ) # type: Dict[str, Type[RuntimeProjectAttachmentServiceTransport]] - _transport_registry["grpc"] = RuntimeProjectAttachmentServiceGrpcTransport - _transport_registry[ - "grpc_asyncio" - ] = RuntimeProjectAttachmentServiceGrpcAsyncIOTransport _transport_registry["rest"] = RuntimeProjectAttachmentServiceRestTransport def get_transport_class( diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/pagers.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/pagers.py index 05ba311342e2..7e63e765df51 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/pagers.py +++ 
b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/pagers.py @@ -126,96 +126,3 @@ def __iter__( def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) - - -class ListRuntimeProjectAttachmentsAsyncPager: - """A pager for iterating through ``list_runtime_project_attachments`` requests. - - This class thinly wraps an initial - :class:`google.cloud.apihub_v1.types.ListRuntimeProjectAttachmentsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``runtime_project_attachments`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListRuntimeProjectAttachments`` requests and continue to iterate - through the ``runtime_project_attachments`` field on the - corresponding responses. - - All the usual :class:`google.cloud.apihub_v1.types.ListRuntimeProjectAttachmentsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - - def __init__( - self, - method: Callable[ - ..., - Awaitable[ - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse - ], - ], - request: runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest, - response: runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.apihub_v1.types.ListRuntimeProjectAttachmentsRequest): - The initial request object. - response (google.cloud.apihub_v1.types.ListRuntimeProjectAttachmentsResponse): - The initial response object. 
- retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = ( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest( - request - ) - ) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages( - self, - ) -> AsyncIterator[ - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse - ]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method( - self._request, - retry=self._retry, - timeout=self._timeout, - metadata=self._metadata, - ) - yield self._response - - def __aiter__( - self, - ) -> AsyncIterator[runtime_project_attachment_service.RuntimeProjectAttachment]: - async def async_generator(): - async for page in self.pages: - for response in page.runtime_project_attachments: - yield response - - return async_generator() - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/transports/__init__.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/transports/__init__.py index e7fe76d5503e..604d33074e46 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/transports/__init__.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/transports/__init__.py @@ -17,8 +17,6 @@ from typing import Dict, Type from .base import 
RuntimeProjectAttachmentServiceTransport -from .grpc import RuntimeProjectAttachmentServiceGrpcTransport -from .grpc_asyncio import RuntimeProjectAttachmentServiceGrpcAsyncIOTransport from .rest import ( RuntimeProjectAttachmentServiceRestInterceptor, RuntimeProjectAttachmentServiceRestTransport, @@ -28,16 +26,10 @@ _transport_registry = ( OrderedDict() ) # type: Dict[str, Type[RuntimeProjectAttachmentServiceTransport]] -_transport_registry["grpc"] = RuntimeProjectAttachmentServiceGrpcTransport -_transport_registry[ - "grpc_asyncio" -] = RuntimeProjectAttachmentServiceGrpcAsyncIOTransport _transport_registry["rest"] = RuntimeProjectAttachmentServiceRestTransport __all__ = ( "RuntimeProjectAttachmentServiceTransport", - "RuntimeProjectAttachmentServiceGrpcTransport", - "RuntimeProjectAttachmentServiceGrpcAsyncIOTransport", "RuntimeProjectAttachmentServiceRestTransport", "RuntimeProjectAttachmentServiceRestInterceptor", ) diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_api_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_api_async.py deleted file mode 100644 index 9dcedc8d971f..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_api_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! 
-# -# Snippet for DeleteApi -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_DeleteApi_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_delete_api(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.DeleteApiRequest( - name="name_value", - ) - - # Make the request - await client.delete_api(request=request) - - -# [END apihub_v1_generated_ApiHub_DeleteApi_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_attribute_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_attribute_async.py deleted file mode 100644 index 3c139efd73c2..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_attribute_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteAttribute -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_DeleteAttribute_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_delete_attribute(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.DeleteAttributeRequest( - name="name_value", - ) - - # Make the request - await client.delete_attribute(request=request) - - -# [END apihub_v1_generated_ApiHub_DeleteAttribute_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_deployment_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_deployment_async.py deleted file mode 100644 index c187acdcc75e..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_deployment_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDeployment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_DeleteDeployment_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_delete_deployment(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.DeleteDeploymentRequest( - name="name_value", - ) - - # Make the request - await client.delete_deployment(request=request) - - -# [END apihub_v1_generated_ApiHub_DeleteDeployment_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_external_api_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_external_api_async.py deleted file mode 100644 index ee72bfdb2cb5..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_external_api_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteExternalApi -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_DeleteExternalApi_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_delete_external_api(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.DeleteExternalApiRequest( - name="name_value", - ) - - # Make the request - await client.delete_external_api(request=request) - - -# [END apihub_v1_generated_ApiHub_DeleteExternalApi_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_spec_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_spec_async.py deleted file mode 100644 index 18bf9aa527da..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_spec_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteSpec -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_DeleteSpec_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_delete_spec(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.DeleteSpecRequest( - name="name_value", - ) - - # Make the request - await client.delete_spec(request=request) - - -# [END apihub_v1_generated_ApiHub_DeleteSpec_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_version_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_version_async.py deleted file mode 100644 index a84532d3859c..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_delete_version_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteVersion -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_DeleteVersion_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_delete_version(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.DeleteVersionRequest( - name="name_value", - ) - - # Make the request - await client.delete_version(request=request) - - -# [END apihub_v1_generated_ApiHub_DeleteVersion_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_create_dependency_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_create_dependency_async.py deleted file mode 100644 index bc37ee587379..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_create_dependency_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDependency -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHubDependencies_CreateDependency_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_create_dependency(): - # Create a client - client = apihub_v1.ApiHubDependenciesAsyncClient() - - # Initialize request argument(s) - dependency = apihub_v1.Dependency() - dependency.consumer.operation_resource_name = "operation_resource_name_value" - dependency.supplier.operation_resource_name = "operation_resource_name_value" - - request = apihub_v1.CreateDependencyRequest( - parent="parent_value", - dependency=dependency, - ) - - # Make the request - response = await client.create_dependency(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHubDependencies_CreateDependency_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_delete_dependency_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_delete_dependency_async.py deleted file mode 100644 index af78f08fa7cd..000000000000 --- 
a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_delete_dependency_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDependency -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHubDependencies_DeleteDependency_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_delete_dependency(): - # Create a client - client = apihub_v1.ApiHubDependenciesAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.DeleteDependencyRequest( - name="name_value", - ) - - # Make the request - await client.delete_dependency(request=request) - - -# [END apihub_v1_generated_ApiHubDependencies_DeleteDependency_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_update_dependency_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_update_dependency_async.py deleted file mode 100644 index 37b5b624db53..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_dependencies_update_dependency_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDependency -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHubDependencies_UpdateDependency_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_update_dependency(): - # Create a client - client = apihub_v1.ApiHubDependenciesAsyncClient() - - # Initialize request argument(s) - dependency = apihub_v1.Dependency() - dependency.consumer.operation_resource_name = "operation_resource_name_value" - dependency.supplier.operation_resource_name = "operation_resource_name_value" - - request = apihub_v1.UpdateDependencyRequest( - dependency=dependency, - ) - - # Make the request - response = await client.update_dependency(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHubDependencies_UpdateDependency_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_api_operation_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_api_operation_async.py deleted file mode 100644 index 884fa8a495b2..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_api_operation_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetApiOperation -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_GetApiOperation_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_get_api_operation(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.GetApiOperationRequest( - name="name_value", - ) - - # Make the request - response = await client.get_api_operation(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_GetApiOperation_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_definition_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_definition_async.py deleted file mode 100644 index b1b62ba74864..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_definition_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDefinition -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_GetDefinition_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_get_definition(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.GetDefinitionRequest( - name="name_value", - ) - - # Make the request - response = await client.get_definition(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_GetDefinition_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_deployment_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_deployment_async.py deleted file mode 100644 index a16639adc526..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_deployment_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDeployment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_GetDeployment_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_get_deployment(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.GetDeploymentRequest( - name="name_value", - ) - - # Make the request - response = await client.get_deployment(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_GetDeployment_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_external_api_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_external_api_async.py deleted file mode 100644 index 2e44849e8c40..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_external_api_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetExternalApi -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_GetExternalApi_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_get_external_api(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.GetExternalApiRequest( - name="name_value", - ) - - # Make the request - response = await client.get_external_api(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_GetExternalApi_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_spec_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_spec_async.py deleted file mode 100644 index 5679acd29ccd..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_spec_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetSpec -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_GetSpec_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_get_spec(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.GetSpecRequest( - name="name_value", - ) - - # Make the request - response = await client.get_spec(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_GetSpec_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_spec_contents_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_spec_contents_async.py deleted file mode 100644 index ffaf9eaa238b..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_spec_contents_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetSpecContents -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_GetSpecContents_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_get_spec_contents(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.GetSpecContentsRequest( - name="name_value", - ) - - # Make the request - response = await client.get_spec_contents(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_GetSpecContents_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_version_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_version_async.py deleted file mode 100644 index 2c7b1d375fee..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_version_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetVersion -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_GetVersion_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_get_version(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.GetVersionRequest( - name="name_value", - ) - - # Make the request - response = await client.get_version(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_GetVersion_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_external_apis_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_external_apis_async.py deleted file mode 100644 index d45eefcb5fa6..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_external_apis_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListExternalApis -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_ListExternalApis_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_list_external_apis(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.ListExternalApisRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_external_apis(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END apihub_v1_generated_ApiHub_ListExternalApis_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_versions_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_versions_async.py deleted file mode 100644 index 795e376e27cf..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_versions_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListVersions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_ListVersions_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_list_versions(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.ListVersionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_versions(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END apihub_v1_generated_ApiHub_ListVersions_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_plugin_disable_plugin_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_plugin_disable_plugin_async.py deleted file mode 100644 index 5c1773b84f6a..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_plugin_disable_plugin_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DisablePlugin -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHubPlugin_DisablePlugin_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_disable_plugin(): - # Create a client - client = apihub_v1.ApiHubPluginAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.DisablePluginRequest( - name="name_value", - ) - - # Make the request - response = await client.disable_plugin(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHubPlugin_DisablePlugin_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_plugin_enable_plugin_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_plugin_enable_plugin_async.py deleted file mode 100644 index c2dbee38cd0b..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_plugin_enable_plugin_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for EnablePlugin -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHubPlugin_EnablePlugin_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_enable_plugin(): - # Create a client - client = apihub_v1.ApiHubPluginAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.EnablePluginRequest( - name="name_value", - ) - - # Make the request - response = await client.enable_plugin(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHubPlugin_EnablePlugin_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_plugin_get_plugin_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_plugin_get_plugin_async.py deleted file mode 100644 index 8ced7daffafe..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_plugin_get_plugin_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetPlugin -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHubPlugin_GetPlugin_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_get_plugin(): - # Create a client - client = apihub_v1.ApiHubPluginAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.GetPluginRequest( - name="name_value", - ) - - # Make the request - response = await client.get_plugin(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHubPlugin_GetPlugin_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_search_resources_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_search_resources_async.py deleted file mode 100644 index 538e2cee6af0..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_search_resources_async.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SearchResources -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_SearchResources_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_search_resources(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.SearchResourcesRequest( - location="location_value", - query="query_value", - ) - - # Make the request - page_result = client.search_resources(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END apihub_v1_generated_ApiHub_SearchResources_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_attribute_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_attribute_async.py deleted file mode 100644 index b0583bcadadb..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_attribute_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateAttribute -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_UpdateAttribute_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_update_attribute(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - attribute = apihub_v1.Attribute() - attribute.display_name = "display_name_value" - attribute.scope = "PLUGIN" - attribute.data_type = "STRING" - - request = apihub_v1.UpdateAttributeRequest( - attribute=attribute, - ) - - # Make the request - response = await client.update_attribute(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_UpdateAttribute_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_deployment_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_deployment_async.py deleted file mode 100644 index 2624bd2d844a..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_deployment_async.py +++ /dev/null @@ -1,59 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDeployment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_UpdateDeployment_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_update_deployment(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - deployment = apihub_v1.Deployment() - deployment.display_name = "display_name_value" - deployment.deployment_type.enum_values.values.id = "id_value" - deployment.deployment_type.enum_values.values.display_name = "display_name_value" - deployment.resource_uri = "resource_uri_value" - deployment.endpoints = ['endpoints_value1', 'endpoints_value2'] - - request = apihub_v1.UpdateDeploymentRequest( - deployment=deployment, - ) - - # Make the request - response = await client.update_deployment(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_UpdateDeployment_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_spec_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_spec_async.py deleted file mode 100644 index 62e5c6bef8c2..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_spec_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateSpec -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_ApiHub_UpdateSpec_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_update_spec(): - # Create a client - client = apihub_v1.ApiHubAsyncClient() - - # Initialize request argument(s) - spec = apihub_v1.Spec() - spec.display_name = "display_name_value" - spec.spec_type.enum_values.values.id = "id_value" - spec.spec_type.enum_values.values.display_name = "display_name_value" - - request = apihub_v1.UpdateSpecRequest( - spec=spec, - ) - - # Make the request - response = await client.update_spec(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_ApiHub_UpdateSpec_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_host_project_registration_service_create_host_project_registration_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_host_project_registration_service_create_host_project_registration_async.py deleted file mode 100644 index 1f3b26540e0c..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_host_project_registration_service_create_host_project_registration_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google 
LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateHostProjectRegistration -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_HostProjectRegistrationService_CreateHostProjectRegistration_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_create_host_project_registration(): - # Create a client - client = apihub_v1.HostProjectRegistrationServiceAsyncClient() - - # Initialize request argument(s) - host_project_registration = apihub_v1.HostProjectRegistration() - host_project_registration.gcp_project = "gcp_project_value" - - request = apihub_v1.CreateHostProjectRegistrationRequest( - parent="parent_value", - host_project_registration_id="host_project_registration_id_value", - host_project_registration=host_project_registration, - ) - - # Make the request - response = await client.create_host_project_registration(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_HostProjectRegistrationService_CreateHostProjectRegistration_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_host_project_registration_service_list_host_project_registrations_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_host_project_registration_service_list_host_project_registrations_async.py deleted file mode 100644 index 0796d381861a..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_host_project_registration_service_list_host_project_registrations_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListHostProjectRegistrations -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_HostProjectRegistrationService_ListHostProjectRegistrations_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_list_host_project_registrations(): - # Create a client - client = apihub_v1.HostProjectRegistrationServiceAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.ListHostProjectRegistrationsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_host_project_registrations(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END apihub_v1_generated_HostProjectRegistrationService_ListHostProjectRegistrations_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_lint_spec_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_lint_spec_async.py deleted file mode 100644 index 03effe93eca4..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_lint_spec_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for LintSpec -# NOTE: This snippet has been automatically generated for illustrative purposes only. 
-# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_LintingService_LintSpec_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_lint_spec(): - # Create a client - client = apihub_v1.LintingServiceAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.LintSpecRequest( - name="name_value", - ) - - # Make the request - await client.lint_spec(request=request) - - -# [END apihub_v1_generated_LintingService_LintSpec_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_update_style_guide_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_update_style_guide_async.py deleted file mode 100644 index 3b819e06d8ba..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_update_style_guide_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateStyleGuide -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_LintingService_UpdateStyleGuide_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_update_style_guide(): - # Create a client - client = apihub_v1.LintingServiceAsyncClient() - - # Initialize request argument(s) - style_guide = apihub_v1.StyleGuide() - style_guide.linter = "OTHER" - style_guide.contents.contents = b'contents_blob' - style_guide.contents.mime_type = "mime_type_value" - - request = apihub_v1.UpdateStyleGuideRequest( - style_guide=style_guide, - ) - - # Make the request - response = await client.update_style_guide(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_LintingService_UpdateStyleGuide_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_delete_runtime_project_attachment_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_delete_runtime_project_attachment_async.py deleted file mode 100644 index c6d606366236..000000000000 --- 
a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_delete_runtime_project_attachment_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteRuntimeProjectAttachment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_RuntimeProjectAttachmentService_DeleteRuntimeProjectAttachment_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_delete_runtime_project_attachment(): - # Create a client - client = apihub_v1.RuntimeProjectAttachmentServiceAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.DeleteRuntimeProjectAttachmentRequest( - name="name_value", - ) - - # Make the request - await client.delete_runtime_project_attachment(request=request) - - -# [END apihub_v1_generated_RuntimeProjectAttachmentService_DeleteRuntimeProjectAttachment_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_list_runtime_project_attachments_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_list_runtime_project_attachments_async.py deleted file mode 100644 index 82f990e50294..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_list_runtime_project_attachments_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! 
-# -# Snippet for ListRuntimeProjectAttachments -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_RuntimeProjectAttachmentService_ListRuntimeProjectAttachments_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_list_runtime_project_attachments(): - # Create a client - client = apihub_v1.RuntimeProjectAttachmentServiceAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.ListRuntimeProjectAttachmentsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_runtime_project_attachments(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END apihub_v1_generated_RuntimeProjectAttachmentService_ListRuntimeProjectAttachments_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_lookup_runtime_project_attachment_async.py b/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_lookup_runtime_project_attachment_async.py deleted file mode 100644 index 342d51e39899..000000000000 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_runtime_project_attachment_service_lookup_runtime_project_attachment_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# 
Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for LookupRuntimeProjectAttachment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub - - -# [START apihub_v1_generated_RuntimeProjectAttachmentService_LookupRuntimeProjectAttachment_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 - - -async def sample_lookup_runtime_project_attachment(): - # Create a client - client = apihub_v1.RuntimeProjectAttachmentServiceAsyncClient() - - # Initialize request argument(s) - request = apihub_v1.LookupRuntimeProjectAttachmentRequest( - name="name_value", - ) - - # Make the request - response = await client.lookup_runtime_project_attachment(request=request) - - # Handle the response - print(response) - -# [END apihub_v1_generated_RuntimeProjectAttachmentService_LookupRuntimeProjectAttachment_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/snippet_metadata_google.cloud.apihub.v1.json b/packages/google-cloud-apihub/samples/generated_samples/snippet_metadata_google.cloud.apihub.v1.json index 26cd3b4e3072..2dd2f7e3c87d 100644 --- a/packages/google-cloud-apihub/samples/generated_samples/snippet_metadata_google.cloud.apihub.v1.json +++ b/packages/google-cloud-apihub/samples/generated_samples/snippet_metadata_google.cloud.apihub.v1.json @@ -8,18 +8,17 @@ ], "language": "PYTHON", "name": "google-cloud-apihub", - "version": "0.1.0" + "version": "0.2.0" }, "snippets": [ { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesAsyncClient", - "shortName": "ApiHubDependenciesAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient", + "shortName": "ApiHubDependenciesClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesAsyncClient.create_dependency", + "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient.create_dependency", "method": { "fullName": "google.cloud.apihub.v1.ApiHubDependencies.CreateDependency", "service": { @@ -62,10 +61,10 @@ "shortName": "create_dependency" }, "description": "Sample for CreateDependency", - "file": 
"apihub_v1_generated_api_hub_dependencies_create_dependency_async.py", + "file": "apihub_v1_generated_api_hub_dependencies_create_dependency_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHubDependencies_CreateDependency_async", + "regionTag": "apihub_v1_generated_ApiHubDependencies_CreateDependency_sync", "segments": [ { "end": 56, @@ -98,7 +97,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_dependencies_create_dependency_async.py" + "title": "apihub_v1_generated_api_hub_dependencies_create_dependency_sync.py" }, { "canonical": true, @@ -107,30 +106,22 @@ "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient", "shortName": "ApiHubDependenciesClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient.create_dependency", + "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient.delete_dependency", "method": { - "fullName": "google.cloud.apihub.v1.ApiHubDependencies.CreateDependency", + "fullName": "google.cloud.apihub.v1.ApiHubDependencies.DeleteDependency", "service": { "fullName": "google.cloud.apihub.v1.ApiHubDependencies", "shortName": "ApiHubDependencies" }, - "shortName": "CreateDependency" + "shortName": "DeleteDependency" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.CreateDependencyRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "dependency", - "type": "google.cloud.apihub_v1.types.Dependency" + "type": "google.cloud.apihub_v1.types.DeleteDependencyRequest" }, { - "name": "dependency_id", + "name": "name", "type": "str" }, { @@ -146,22 +137,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.Dependency", - "shortName": "create_dependency" + "shortName": "delete_dependency" }, - "description": "Sample for CreateDependency", - "file": "apihub_v1_generated_api_hub_dependencies_create_dependency_sync.py", + "description": "Sample for DeleteDependency", + 
"file": "apihub_v1_generated_api_hub_dependencies_delete_dependency_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHubDependencies_CreateDependency_sync", + "regionTag": "apihub_v1_generated_ApiHubDependencies_DeleteDependency_sync", "segments": [ { - "end": 56, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 49, "start": 27, "type": "SHORT" }, @@ -171,44 +161,41 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_dependencies_create_dependency_sync.py" + "title": "apihub_v1_generated_api_hub_dependencies_delete_dependency_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesAsyncClient", - "shortName": "ApiHubDependenciesAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient", + "shortName": "ApiHubDependenciesClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesAsyncClient.delete_dependency", + "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient.get_dependency", "method": { - "fullName": "google.cloud.apihub.v1.ApiHubDependencies.DeleteDependency", + "fullName": "google.cloud.apihub.v1.ApiHubDependencies.GetDependency", "service": { "fullName": "google.cloud.apihub.v1.ApiHubDependencies", "shortName": "ApiHubDependencies" }, - "shortName": "DeleteDependency" + "shortName": "GetDependency" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteDependencyRequest" + "type": "google.cloud.apihub_v1.types.GetDependencyRequest" }, { "name": "name", @@ -227,21 +214,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_dependency" + "resultType": 
"google.cloud.apihub_v1.types.Dependency", + "shortName": "get_dependency" }, - "description": "Sample for DeleteDependency", - "file": "apihub_v1_generated_api_hub_dependencies_delete_dependency_async.py", + "description": "Sample for GetDependency", + "file": "apihub_v1_generated_api_hub_dependencies_get_dependency_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHubDependencies_DeleteDependency_async", + "regionTag": "apihub_v1_generated_ApiHubDependencies_GetDependency_sync", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -256,15 +244,17 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_dependencies_delete_dependency_async.py" + "title": "apihub_v1_generated_api_hub_dependencies_get_dependency_sync.py" }, { "canonical": true, @@ -273,22 +263,22 @@ "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient", "shortName": "ApiHubDependenciesClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient.delete_dependency", + "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient.list_dependencies", "method": { - "fullName": "google.cloud.apihub.v1.ApiHubDependencies.DeleteDependency", + "fullName": "google.cloud.apihub.v1.ApiHubDependencies.ListDependencies", "service": { "fullName": "google.cloud.apihub.v1.ApiHubDependencies", "shortName": "ApiHubDependencies" }, - "shortName": "DeleteDependency" + "shortName": "ListDependencies" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteDependencyRequest" + "type": "google.cloud.apihub_v1.types.ListDependenciesRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -304,21 +294,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": 
"delete_dependency" + "resultType": "google.cloud.apihub_v1.services.api_hub_dependencies.pagers.ListDependenciesPager", + "shortName": "list_dependencies" }, - "description": "Sample for DeleteDependency", - "file": "apihub_v1_generated_api_hub_dependencies_delete_dependency_sync.py", + "description": "Sample for ListDependencies", + "file": "apihub_v1_generated_api_hub_dependencies_list_dependencies_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHubDependencies_DeleteDependency_sync", + "regionTag": "apihub_v1_generated_ApiHubDependencies_ListDependencies_sync", "segments": [ { - "end": 49, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 52, "start": 27, "type": "SHORT" }, @@ -333,41 +324,46 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_dependencies_delete_dependency_sync.py" + "title": "apihub_v1_generated_api_hub_dependencies_list_dependencies_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesAsyncClient", - "shortName": "ApiHubDependenciesAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient", + "shortName": "ApiHubDependenciesClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesAsyncClient.get_dependency", + "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient.update_dependency", "method": { - "fullName": "google.cloud.apihub.v1.ApiHubDependencies.GetDependency", + "fullName": "google.cloud.apihub.v1.ApiHubDependencies.UpdateDependency", "service": { "fullName": "google.cloud.apihub.v1.ApiHubDependencies", "shortName": "ApiHubDependencies" }, - "shortName": "GetDependency" + "shortName": "UpdateDependency" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.apihub_v1.types.GetDependencyRequest" + "type": "google.cloud.apihub_v1.types.UpdateDependencyRequest" }, { - "name": "name", - "type": "str" + "name": "dependency", + "type": "google.cloud.apihub_v1.types.Dependency" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -383,21 +379,21 @@ } ], "resultType": "google.cloud.apihub_v1.types.Dependency", - "shortName": "get_dependency" + "shortName": "update_dependency" }, - "description": "Sample for GetDependency", - "file": "apihub_v1_generated_api_hub_dependencies_get_dependency_async.py", + "description": "Sample for UpdateDependency", + "file": "apihub_v1_generated_api_hub_dependencies_update_dependency_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHubDependencies_GetDependency_async", + "regionTag": "apihub_v1_generated_ApiHubDependencies_UpdateDependency_sync", "segments": [ { - "end": 51, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 55, "start": 27, "type": "SHORT" }, @@ -407,43 +403,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 52, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_dependencies_get_dependency_async.py" + "title": "apihub_v1_generated_api_hub_dependencies_update_dependency_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient", - "shortName": "ApiHubDependenciesClient" + "fullName": "google.cloud.apihub_v1.ApiHubPluginClient", + "shortName": "ApiHubPluginClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient.get_dependency", + "fullName": "google.cloud.apihub_v1.ApiHubPluginClient.disable_plugin", "method": { - "fullName": 
"google.cloud.apihub.v1.ApiHubDependencies.GetDependency", + "fullName": "google.cloud.apihub.v1.ApiHubPlugin.DisablePlugin", "service": { - "fullName": "google.cloud.apihub.v1.ApiHubDependencies", - "shortName": "ApiHubDependencies" + "fullName": "google.cloud.apihub.v1.ApiHubPlugin", + "shortName": "ApiHubPlugin" }, - "shortName": "GetDependency" + "shortName": "DisablePlugin" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.GetDependencyRequest" + "type": "google.cloud.apihub_v1.types.DisablePluginRequest" }, { "name": "name", @@ -462,14 +458,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.Dependency", - "shortName": "get_dependency" + "resultType": "google.cloud.apihub_v1.types.Plugin", + "shortName": "disable_plugin" }, - "description": "Sample for GetDependency", - "file": "apihub_v1_generated_api_hub_dependencies_get_dependency_sync.py", + "description": "Sample for DisablePlugin", + "file": "apihub_v1_generated_api_hub_plugin_disable_plugin_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHubDependencies_GetDependency_sync", + "regionTag": "apihub_v1_generated_ApiHubPlugin_DisablePlugin_sync", "segments": [ { "end": 51, @@ -502,32 +498,31 @@ "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_dependencies_get_dependency_sync.py" + "title": "apihub_v1_generated_api_hub_plugin_disable_plugin_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesAsyncClient", - "shortName": "ApiHubDependenciesAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubPluginClient", + "shortName": "ApiHubPluginClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesAsyncClient.list_dependencies", + "fullName": "google.cloud.apihub_v1.ApiHubPluginClient.enable_plugin", "method": { - "fullName": 
"google.cloud.apihub.v1.ApiHubDependencies.ListDependencies", + "fullName": "google.cloud.apihub.v1.ApiHubPlugin.EnablePlugin", "service": { - "fullName": "google.cloud.apihub.v1.ApiHubDependencies", - "shortName": "ApiHubDependencies" + "fullName": "google.cloud.apihub.v1.ApiHubPlugin", + "shortName": "ApiHubPlugin" }, - "shortName": "ListDependencies" + "shortName": "EnablePlugin" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.ListDependenciesRequest" + "type": "google.cloud.apihub_v1.types.EnablePluginRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -543,22 +538,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.services.api_hub_dependencies.pagers.ListDependenciesAsyncPager", - "shortName": "list_dependencies" + "resultType": "google.cloud.apihub_v1.types.Plugin", + "shortName": "enable_plugin" }, - "description": "Sample for ListDependencies", - "file": "apihub_v1_generated_api_hub_dependencies_list_dependencies_async.py", + "description": "Sample for EnablePlugin", + "file": "apihub_v1_generated_api_hub_plugin_enable_plugin_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHubDependencies_ListDependencies_async", + "regionTag": "apihub_v1_generated_ApiHubPlugin_EnablePlugin_sync", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -578,36 +573,36 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_dependencies_list_dependencies_async.py" + "title": "apihub_v1_generated_api_hub_plugin_enable_plugin_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient", - "shortName": "ApiHubDependenciesClient" + "fullName": "google.cloud.apihub_v1.ApiHubPluginClient", + "shortName": 
"ApiHubPluginClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient.list_dependencies", + "fullName": "google.cloud.apihub_v1.ApiHubPluginClient.get_plugin", "method": { - "fullName": "google.cloud.apihub.v1.ApiHubDependencies.ListDependencies", + "fullName": "google.cloud.apihub.v1.ApiHubPlugin.GetPlugin", "service": { - "fullName": "google.cloud.apihub.v1.ApiHubDependencies", - "shortName": "ApiHubDependencies" + "fullName": "google.cloud.apihub.v1.ApiHubPlugin", + "shortName": "ApiHubPlugin" }, - "shortName": "ListDependencies" + "shortName": "GetPlugin" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.ListDependenciesRequest" + "type": "google.cloud.apihub_v1.types.GetPluginRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -623,22 +618,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.services.api_hub_dependencies.pagers.ListDependenciesPager", - "shortName": "list_dependencies" + "resultType": "google.cloud.apihub_v1.types.Plugin", + "shortName": "get_plugin" }, - "description": "Sample for ListDependencies", - "file": "apihub_v1_generated_api_hub_dependencies_list_dependencies_sync.py", + "description": "Sample for GetPlugin", + "file": "apihub_v1_generated_api_hub_plugin_get_plugin_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHubDependencies_ListDependencies_sync", + "regionTag": "apihub_v1_generated_ApiHubPlugin_GetPlugin_sync", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -658,4532 +653,44 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_dependencies_list_dependencies_sync.py" + "title": "apihub_v1_generated_api_hub_plugin_get_plugin_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, 
"client": { - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesAsyncClient", - "shortName": "ApiHubDependenciesAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesAsyncClient.update_dependency", + "fullName": "google.cloud.apihub_v1.ApiHubClient.create_api", "method": { - "fullName": "google.cloud.apihub.v1.ApiHubDependencies.UpdateDependency", + "fullName": "google.cloud.apihub.v1.ApiHub.CreateApi", "service": { - "fullName": "google.cloud.apihub.v1.ApiHubDependencies", - "shortName": "ApiHubDependencies" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "UpdateDependency" + "shortName": "CreateApi" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateDependencyRequest" + "type": "google.cloud.apihub_v1.types.CreateApiRequest" }, { - "name": "dependency", - "type": "google.cloud.apihub_v1.types.Dependency" + "name": "parent", + "type": "str" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "api", + "type": "google.cloud.apihub_v1.types.Api" }, { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Dependency", - "shortName": "update_dependency" - }, - "description": "Sample for UpdateDependency", - "file": "apihub_v1_generated_api_hub_dependencies_update_dependency_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHubDependencies_UpdateDependency_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, 
- { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_dependencies_update_dependency_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient", - "shortName": "ApiHubDependenciesClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubDependenciesClient.update_dependency", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHubDependencies.UpdateDependency", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHubDependencies", - "shortName": "ApiHubDependencies" - }, - "shortName": "UpdateDependency" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateDependencyRequest" - }, - { - "name": "dependency", - "type": "google.cloud.apihub_v1.types.Dependency" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Dependency", - "shortName": "update_dependency" - }, - "description": "Sample for UpdateDependency", - "file": "apihub_v1_generated_api_hub_dependencies_update_dependency_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHubDependencies_UpdateDependency_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"apihub_v1_generated_api_hub_dependencies_update_dependency_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubPluginAsyncClient", - "shortName": "ApiHubPluginAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubPluginAsyncClient.disable_plugin", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHubPlugin.DisablePlugin", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHubPlugin", - "shortName": "ApiHubPlugin" - }, - "shortName": "DisablePlugin" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.DisablePluginRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Plugin", - "shortName": "disable_plugin" - }, - "description": "Sample for DisablePlugin", - "file": "apihub_v1_generated_api_hub_plugin_disable_plugin_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHubPlugin_DisablePlugin_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_plugin_disable_plugin_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubPluginClient", - "shortName": "ApiHubPluginClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubPluginClient.disable_plugin", - "method": { - "fullName": 
"google.cloud.apihub.v1.ApiHubPlugin.DisablePlugin", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHubPlugin", - "shortName": "ApiHubPlugin" - }, - "shortName": "DisablePlugin" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.DisablePluginRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Plugin", - "shortName": "disable_plugin" - }, - "description": "Sample for DisablePlugin", - "file": "apihub_v1_generated_api_hub_plugin_disable_plugin_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHubPlugin_DisablePlugin_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_plugin_disable_plugin_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubPluginAsyncClient", - "shortName": "ApiHubPluginAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubPluginAsyncClient.enable_plugin", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHubPlugin.EnablePlugin", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHubPlugin", - "shortName": "ApiHubPlugin" - }, - "shortName": "EnablePlugin" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.EnablePluginRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": 
"retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Plugin", - "shortName": "enable_plugin" - }, - "description": "Sample for EnablePlugin", - "file": "apihub_v1_generated_api_hub_plugin_enable_plugin_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHubPlugin_EnablePlugin_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_plugin_enable_plugin_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubPluginClient", - "shortName": "ApiHubPluginClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubPluginClient.enable_plugin", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHubPlugin.EnablePlugin", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHubPlugin", - "shortName": "ApiHubPlugin" - }, - "shortName": "EnablePlugin" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.EnablePluginRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Plugin", - "shortName": "enable_plugin" - }, - "description": "Sample for EnablePlugin", - "file": "apihub_v1_generated_api_hub_plugin_enable_plugin_sync.py", - "language": 
"PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHubPlugin_EnablePlugin_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_plugin_enable_plugin_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubPluginAsyncClient", - "shortName": "ApiHubPluginAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubPluginAsyncClient.get_plugin", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHubPlugin.GetPlugin", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHubPlugin", - "shortName": "ApiHubPlugin" - }, - "shortName": "GetPlugin" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetPluginRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Plugin", - "shortName": "get_plugin" - }, - "description": "Sample for GetPlugin", - "file": "apihub_v1_generated_api_hub_plugin_get_plugin_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHubPlugin_GetPlugin_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 
48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_plugin_get_plugin_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubPluginClient", - "shortName": "ApiHubPluginClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubPluginClient.get_plugin", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHubPlugin.GetPlugin", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHubPlugin", - "shortName": "ApiHubPlugin" - }, - "shortName": "GetPlugin" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetPluginRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Plugin", - "shortName": "get_plugin" - }, - "description": "Sample for GetPlugin", - "file": "apihub_v1_generated_api_hub_plugin_get_plugin_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHubPlugin_GetPlugin_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_plugin_get_plugin_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": 
"google.cloud.apihub_v1.ApiHubAsyncClient.create_api", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.CreateApi", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "CreateApi" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.CreateApiRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "api", - "type": "google.cloud.apihub_v1.types.Api" - }, - { - "name": "api_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Api", - "shortName": "create_api" - }, - "description": "Sample for CreateApi", - "file": "apihub_v1_generated_api_hub_create_api_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_CreateApi_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_create_api_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.create_api", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.CreateApi", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "CreateApi" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.CreateApiRequest" - }, - { - 
"name": "parent", - "type": "str" - }, - { - "name": "api", - "type": "google.cloud.apihub_v1.types.Api" - }, - { - "name": "api_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Api", - "shortName": "create_api" - }, - "description": "Sample for CreateApi", - "file": "apihub_v1_generated_api_hub_create_api_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_CreateApi_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_create_api_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.create_attribute", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.CreateAttribute", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "CreateAttribute" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.CreateAttributeRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "attribute", - "type": "google.cloud.apihub_v1.types.Attribute" - }, - { - "name": "attribute_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": 
"metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Attribute", - "shortName": "create_attribute" - }, - "description": "Sample for CreateAttribute", - "file": "apihub_v1_generated_api_hub_create_attribute_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_CreateAttribute_async", - "segments": [ - { - "end": 57, - "start": 27, - "type": "FULL" - }, - { - "end": 57, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 54, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 58, - "start": 55, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_create_attribute_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.create_attribute", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.CreateAttribute", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "CreateAttribute" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.CreateAttributeRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "attribute", - "type": "google.cloud.apihub_v1.types.Attribute" - }, - { - "name": "attribute_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Attribute", - "shortName": "create_attribute" - }, - "description": "Sample for CreateAttribute", - "file": "apihub_v1_generated_api_hub_create_attribute_sync.py", - "language": 
"PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_CreateAttribute_sync", - "segments": [ - { - "end": 57, - "start": 27, - "type": "FULL" - }, - { - "end": 57, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 54, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 58, - "start": 55, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_create_attribute_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.create_deployment", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.CreateDeployment", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "CreateDeployment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.CreateDeploymentRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "deployment", - "type": "google.cloud.apihub_v1.types.Deployment" - }, - { - "name": "deployment_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Deployment", - "shortName": "create_deployment" - }, - "description": "Sample for CreateDeployment", - "file": "apihub_v1_generated_api_hub_create_deployment_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_CreateDeployment_async", - "segments": [ - { - "end": 59, - "start": 27, - "type": "FULL" - }, - { - "end": 59, - "start": 27, - "type": "SHORT" - }, - { - "end": 
40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 53, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 56, - "start": 54, - "type": "REQUEST_EXECUTION" - }, - { - "end": 60, - "start": 57, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_create_deployment_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.create_deployment", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.CreateDeployment", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "CreateDeployment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.CreateDeploymentRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "deployment", - "type": "google.cloud.apihub_v1.types.Deployment" - }, - { - "name": "deployment_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Deployment", - "shortName": "create_deployment" - }, - "description": "Sample for CreateDeployment", - "file": "apihub_v1_generated_api_hub_create_deployment_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_CreateDeployment_sync", - "segments": [ - { - "end": 59, - "start": 27, - "type": "FULL" - }, - { - "end": 59, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 53, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 56, - "start": 54, - "type": "REQUEST_EXECUTION" - }, - { - "end": 60, - "start": 57, - "type": "RESPONSE_HANDLING" - } - ], - 
"title": "apihub_v1_generated_api_hub_create_deployment_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.create_external_api", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.CreateExternalApi", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "CreateExternalApi" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.CreateExternalApiRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "external_api", - "type": "google.cloud.apihub_v1.types.ExternalApi" - }, - { - "name": "external_api_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.ExternalApi", - "shortName": "create_external_api" - }, - "description": "Sample for CreateExternalApi", - "file": "apihub_v1_generated_api_hub_create_external_api_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_CreateExternalApi_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_create_external_api_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - 
"fullName": "google.cloud.apihub_v1.ApiHubClient.create_external_api", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.CreateExternalApi", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "CreateExternalApi" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.CreateExternalApiRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "external_api", - "type": "google.cloud.apihub_v1.types.ExternalApi" - }, - { - "name": "external_api_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.ExternalApi", - "shortName": "create_external_api" - }, - "description": "Sample for CreateExternalApi", - "file": "apihub_v1_generated_api_hub_create_external_api_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_CreateExternalApi_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_create_external_api_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.create_spec", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.CreateSpec", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, 
- "shortName": "CreateSpec" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.CreateSpecRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "spec", - "type": "google.cloud.apihub_v1.types.Spec" - }, - { - "name": "spec_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Spec", - "shortName": "create_spec" - }, - "description": "Sample for CreateSpec", - "file": "apihub_v1_generated_api_hub_create_spec_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_CreateSpec_async", - "segments": [ - { - "end": 57, - "start": 27, - "type": "FULL" - }, - { - "end": 57, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 54, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 58, - "start": 55, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_create_spec_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.create_spec", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.CreateSpec", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "CreateSpec" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.CreateSpecRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "spec", - "type": "google.cloud.apihub_v1.types.Spec" - }, - { - "name": "spec_id", - "type": "str" - }, - { - "name": "retry", - "type": 
"google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Spec", - "shortName": "create_spec" - }, - "description": "Sample for CreateSpec", - "file": "apihub_v1_generated_api_hub_create_spec_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_CreateSpec_sync", - "segments": [ - { - "end": 57, - "start": 27, - "type": "FULL" - }, - { - "end": 57, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 54, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 58, - "start": 55, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_create_spec_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.create_version", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.CreateVersion", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "CreateVersion" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.CreateVersionRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "version", - "type": "google.cloud.apihub_v1.types.Version" - }, - { - "name": "version_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Version", - "shortName": "create_version" - }, - "description": "Sample for CreateVersion", - "file": 
"apihub_v1_generated_api_hub_create_version_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_CreateVersion_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_create_version_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.create_version", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.CreateVersion", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "CreateVersion" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.CreateVersionRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "version", - "type": "google.cloud.apihub_v1.types.Version" - }, - { - "name": "version_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Version", - "shortName": "create_version" - }, - "description": "Sample for CreateVersion", - "file": "apihub_v1_generated_api_hub_create_version_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_CreateVersion_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, 
- "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_create_version_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.delete_api", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.DeleteApi", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "DeleteApi" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteApiRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_api" - }, - "description": "Sample for DeleteApi", - "file": "apihub_v1_generated_api_hub_delete_api_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_DeleteApi_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_delete_api_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": 
"google.cloud.apihub_v1.ApiHubClient.delete_api", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.DeleteApi", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "DeleteApi" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteApiRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_api" - }, - "description": "Sample for DeleteApi", - "file": "apihub_v1_generated_api_hub_delete_api_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_DeleteApi_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_delete_api_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.delete_attribute", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.DeleteAttribute", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "DeleteAttribute" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteAttributeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - 
{ - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_attribute" - }, - "description": "Sample for DeleteAttribute", - "file": "apihub_v1_generated_api_hub_delete_attribute_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_DeleteAttribute_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_delete_attribute_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.delete_attribute", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.DeleteAttribute", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "DeleteAttribute" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteAttributeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_attribute" - }, - "description": "Sample for DeleteAttribute", - "file": "apihub_v1_generated_api_hub_delete_attribute_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_DeleteAttribute_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": 
"CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_delete_attribute_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.delete_deployment", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.DeleteDeployment", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "DeleteDeployment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteDeploymentRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_deployment" - }, - "description": "Sample for DeleteDeployment", - "file": "apihub_v1_generated_api_hub_delete_deployment_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_DeleteDeployment_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_delete_deployment_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": 
"google.cloud.apihub_v1.ApiHubClient.delete_deployment", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.DeleteDeployment", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "DeleteDeployment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteDeploymentRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_deployment" - }, - "description": "Sample for DeleteDeployment", - "file": "apihub_v1_generated_api_hub_delete_deployment_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_DeleteDeployment_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_delete_deployment_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.delete_external_api", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.DeleteExternalApi", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "DeleteExternalApi" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteExternalApiRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": 
"google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_external_api" - }, - "description": "Sample for DeleteExternalApi", - "file": "apihub_v1_generated_api_hub_delete_external_api_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_DeleteExternalApi_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_delete_external_api_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.delete_external_api", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.DeleteExternalApi", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "DeleteExternalApi" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteExternalApiRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_external_api" - }, - "description": "Sample for DeleteExternalApi", - "file": "apihub_v1_generated_api_hub_delete_external_api_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_DeleteExternalApi_sync", - "segments": [ - { - "end": 49, - "start": 27, - 
"type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_delete_external_api_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.delete_spec", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.DeleteSpec", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "DeleteSpec" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteSpecRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_spec" - }, - "description": "Sample for DeleteSpec", - "file": "apihub_v1_generated_api_hub_delete_spec_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_DeleteSpec_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_delete_spec_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - 
"shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.delete_spec", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.DeleteSpec", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "DeleteSpec" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteSpecRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_spec" - }, - "description": "Sample for DeleteSpec", - "file": "apihub_v1_generated_api_hub_delete_spec_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_DeleteSpec_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_delete_spec_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.delete_version", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.DeleteVersion", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "DeleteVersion" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteVersionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - 
{ - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_version" - }, - "description": "Sample for DeleteVersion", - "file": "apihub_v1_generated_api_hub_delete_version_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_DeleteVersion_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_delete_version_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.delete_version", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.DeleteVersion", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "DeleteVersion" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteVersionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_version" - }, - "description": "Sample for DeleteVersion", - "file": "apihub_v1_generated_api_hub_delete_version_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_DeleteVersion_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - 
"start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_delete_version_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.get_api_operation", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetApiOperation", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetApiOperation" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetApiOperationRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.ApiOperation", - "shortName": "get_api_operation" - }, - "description": "Sample for GetApiOperation", - "file": "apihub_v1_generated_api_hub_get_api_operation_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetApiOperation_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_api_operation_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": 
"google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.get_api_operation", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetApiOperation", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetApiOperation" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetApiOperationRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.ApiOperation", - "shortName": "get_api_operation" - }, - "description": "Sample for GetApiOperation", - "file": "apihub_v1_generated_api_hub_get_api_operation_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetApiOperation_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_api_operation_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.get_api", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetApi", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetApi" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.apihub_v1.types.GetApiRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Api", - "shortName": "get_api" - }, - "description": "Sample for GetApi", - "file": "apihub_v1_generated_api_hub_get_api_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetApi_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_api_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.get_api", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetApi", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetApi" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetApiRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Api", - "shortName": "get_api" - }, - "description": "Sample for GetApi", - "file": "apihub_v1_generated_api_hub_get_api_sync.py", - "language": "PYTHON", - "origin": 
"API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetApi_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_api_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.get_attribute", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetAttribute", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetAttribute" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetAttributeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Attribute", - "shortName": "get_attribute" - }, - "description": "Sample for GetAttribute", - "file": "apihub_v1_generated_api_hub_get_attribute_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetAttribute_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 
52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_attribute_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.get_attribute", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetAttribute", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetAttribute" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetAttributeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Attribute", - "shortName": "get_attribute" - }, - "description": "Sample for GetAttribute", - "file": "apihub_v1_generated_api_hub_get_attribute_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetAttribute_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_attribute_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.get_definition", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetDefinition", - 
"service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetDefinition" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetDefinitionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Definition", - "shortName": "get_definition" - }, - "description": "Sample for GetDefinition", - "file": "apihub_v1_generated_api_hub_get_definition_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetDefinition_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_definition_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.get_definition", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetDefinition", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetDefinition" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetDefinitionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": 
"Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Definition", - "shortName": "get_definition" - }, - "description": "Sample for GetDefinition", - "file": "apihub_v1_generated_api_hub_get_definition_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetDefinition_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_definition_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.get_deployment", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetDeployment", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetDeployment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetDeploymentRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Deployment", - "shortName": "get_deployment" - }, - "description": "Sample for GetDeployment", - "file": "apihub_v1_generated_api_hub_get_deployment_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetDeployment_async", - "segments": [ - { - "end": 51, - "start": 27, - 
"type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_deployment_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.get_deployment", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetDeployment", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetDeployment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetDeploymentRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Deployment", - "shortName": "get_deployment" - }, - "description": "Sample for GetDeployment", - "file": "apihub_v1_generated_api_hub_get_deployment_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetDeployment_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_deployment_sync.py" - }, - { - 
"canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.get_external_api", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetExternalApi", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetExternalApi" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetExternalApiRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.ExternalApi", - "shortName": "get_external_api" - }, - "description": "Sample for GetExternalApi", - "file": "apihub_v1_generated_api_hub_get_external_api_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetExternalApi_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_external_api_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.get_external_api", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetExternalApi", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": 
"GetExternalApi" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetExternalApiRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.ExternalApi", - "shortName": "get_external_api" - }, - "description": "Sample for GetExternalApi", - "file": "apihub_v1_generated_api_hub_get_external_api_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetExternalApi_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_external_api_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.get_spec_contents", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetSpecContents", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetSpecContents" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetSpecContentsRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": 
"google.cloud.apihub_v1.types.SpecContents", - "shortName": "get_spec_contents" - }, - "description": "Sample for GetSpecContents", - "file": "apihub_v1_generated_api_hub_get_spec_contents_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetSpecContents_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_spec_contents_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.get_spec_contents", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetSpecContents", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetSpecContents" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetSpecContentsRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.SpecContents", - "shortName": "get_spec_contents" - }, - "description": "Sample for GetSpecContents", - "file": "apihub_v1_generated_api_hub_get_spec_contents_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetSpecContents_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 
27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_spec_contents_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.get_spec", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetSpec", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetSpec" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetSpecRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Spec", - "shortName": "get_spec" - }, - "description": "Sample for GetSpec", - "file": "apihub_v1_generated_api_hub_get_spec_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetSpec_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_spec_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": 
"google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.get_spec", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetSpec", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetSpec" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetSpecRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Spec", - "shortName": "get_spec" - }, - "description": "Sample for GetSpec", - "file": "apihub_v1_generated_api_hub_get_spec_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetSpec_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_spec_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.get_version", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetVersion", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetVersion" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetVersionRequest" - }, - { - "name": "name", - 
"type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Version", - "shortName": "get_version" - }, - "description": "Sample for GetVersion", - "file": "apihub_v1_generated_api_hub_get_version_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_GetVersion_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_version_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.get_version", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.GetVersion", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "GetVersion" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.GetVersionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.types.Version", - "shortName": "get_version" - }, - "description": "Sample for GetVersion", - "file": "apihub_v1_generated_api_hub_get_version_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"apihub_v1_generated_ApiHub_GetVersion_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_get_version_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.list_api_operations", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.ListApiOperations", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "ListApiOperations" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.ListApiOperationsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListApiOperationsAsyncPager", - "shortName": "list_api_operations" - }, - "description": "Sample for ListApiOperations", - "file": "apihub_v1_generated_api_hub_list_api_operations_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_ListApiOperations_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - 
"start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_list_api_operations_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.list_api_operations", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.ListApiOperations", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "ListApiOperations" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.ListApiOperationsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListApiOperationsPager", - "shortName": "list_api_operations" - }, - "description": "Sample for ListApiOperations", - "file": "apihub_v1_generated_api_hub_list_api_operations_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_ListApiOperations_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_list_api_operations_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - 
"fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.list_apis", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.ListApis", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "ListApis" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.ListApisRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListApisAsyncPager", - "shortName": "list_apis" - }, - "description": "Sample for ListApis", - "file": "apihub_v1_generated_api_hub_list_apis_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_ListApis_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_list_apis_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.list_apis", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.ListApis", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "ListApis" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.ListApisRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": 
"google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListApisPager", - "shortName": "list_apis" - }, - "description": "Sample for ListApis", - "file": "apihub_v1_generated_api_hub_list_apis_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_ListApis_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_list_apis_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.list_attributes", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.ListAttributes", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "ListAttributes" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.ListAttributesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListAttributesAsyncPager", - "shortName": "list_attributes" - }, - "description": "Sample for ListAttributes", - "file": "apihub_v1_generated_api_hub_list_attributes_async.py", - "language": 
"PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_ListAttributes_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_list_attributes_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.list_attributes", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.ListAttributes", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "ListAttributes" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.ListAttributesRequest" - }, - { - "name": "parent", + "name": "api_id", "type": "str" }, { @@ -5199,22 +706,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListAttributesPager", - "shortName": "list_attributes" + "resultType": "google.cloud.apihub_v1.types.Api", + "shortName": "create_api" }, - "description": "Sample for ListAttributes", - "file": "apihub_v1_generated_api_hub_list_attributes_sync.py", + "description": "Sample for CreateApi", + "file": "apihub_v1_generated_api_hub_create_api_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_ListAttributes_sync", + "regionTag": "apihub_v1_generated_ApiHub_CreateApi_sync", "segments": [ { - "end": 52, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 55, "start": 27, "type": "SHORT" }, @@ -5224,103 +731,22 @@ "type": 
"CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_list_attributes_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.list_deployments", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.ListDeployments", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "ListDeployments" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.ListDeploymentsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListDeploymentsAsyncPager", - "shortName": "list_deployments" - }, - "description": "Sample for ListDeployments", - "file": "apihub_v1_generated_api_hub_list_deployments_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_ListDeployments_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, { "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_list_deployments_async.py" + "title": 
"apihub_v1_generated_api_hub_create_api_sync.py" }, { "canonical": true, @@ -5329,183 +755,30 @@ "fullName": "google.cloud.apihub_v1.ApiHubClient", "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.list_deployments", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.ListDeployments", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "ListDeployments" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.apihub_v1.types.ListDeploymentsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListDeploymentsPager", - "shortName": "list_deployments" - }, - "description": "Sample for ListDeployments", - "file": "apihub_v1_generated_api_hub_list_deployments_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_ListDeployments_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_list_deployments_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.list_external_apis", + "fullName": "google.cloud.apihub_v1.ApiHubClient.create_attribute", "method": { - "fullName": 
"google.cloud.apihub.v1.ApiHub.ListExternalApis", + "fullName": "google.cloud.apihub.v1.ApiHub.CreateAttribute", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "ListExternalApis" + "shortName": "CreateAttribute" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.ListExternalApisRequest" + "type": "google.cloud.apihub_v1.types.CreateAttributeRequest" }, { "name": "parent", "type": "str" }, { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListExternalApisAsyncPager", - "shortName": "list_external_apis" - }, - "description": "Sample for ListExternalApis", - "file": "apihub_v1_generated_api_hub_list_external_apis_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_ListExternalApis_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "apihub_v1_generated_api_hub_list_external_apis_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.apihub_v1.ApiHubClient", - "shortName": "ApiHubClient" - }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.list_external_apis", - "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.ListExternalApis", - "service": { - "fullName": "google.cloud.apihub.v1.ApiHub", - "shortName": "ApiHub" - }, - "shortName": "ListExternalApis" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.apihub_v1.types.ListExternalApisRequest" + "name": "attribute", + "type": "google.cloud.apihub_v1.types.Attribute" }, { - "name": "parent", + "name": "attribute_id", "type": "str" }, { @@ -5521,22 +794,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListExternalApisPager", - "shortName": "list_external_apis" + "resultType": "google.cloud.apihub_v1.types.Attribute", + "shortName": "create_attribute" }, - "description": "Sample for ListExternalApis", - "file": "apihub_v1_generated_api_hub_list_external_apis_sync.py", + "description": "Sample for CreateAttribute", + "file": "apihub_v1_generated_api_hub_create_attribute_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_ListExternalApis_sync", + "regionTag": "apihub_v1_generated_ApiHub_CreateAttribute_sync", "segments": [ { - "end": 52, + "end": 57, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 57, "start": 27, "type": "SHORT" }, @@ -5546,49 +819,56 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 51, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 54, + "start": 52, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 58, + "start": 55, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_list_external_apis_sync.py" + "title": "apihub_v1_generated_api_hub_create_attribute_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.list_specs", + "fullName": "google.cloud.apihub_v1.ApiHubClient.create_deployment", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.ListSpecs", + "fullName": "google.cloud.apihub.v1.ApiHub.CreateDeployment", 
"service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "ListSpecs" + "shortName": "CreateDeployment" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.ListSpecsRequest" + "type": "google.cloud.apihub_v1.types.CreateDeploymentRequest" }, { "name": "parent", "type": "str" }, + { + "name": "deployment", + "type": "google.cloud.apihub_v1.types.Deployment" + }, + { + "name": "deployment_id", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -5602,22 +882,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListSpecsAsyncPager", - "shortName": "list_specs" + "resultType": "google.cloud.apihub_v1.types.Deployment", + "shortName": "create_deployment" }, - "description": "Sample for ListSpecs", - "file": "apihub_v1_generated_api_hub_list_specs_async.py", + "description": "Sample for CreateDeployment", + "file": "apihub_v1_generated_api_hub_create_deployment_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_ListSpecs_async", + "regionTag": "apihub_v1_generated_ApiHub_CreateDeployment_sync", "segments": [ { - "end": 52, + "end": 59, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 59, "start": 27, "type": "SHORT" }, @@ -5627,22 +907,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 53, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 56, + "start": 54, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 60, + "start": 57, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_list_specs_async.py" + "title": "apihub_v1_generated_api_hub_create_deployment_sync.py" }, { "canonical": true, @@ -5651,24 +931,32 @@ "fullName": "google.cloud.apihub_v1.ApiHubClient", "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.list_specs", + "fullName": 
"google.cloud.apihub_v1.ApiHubClient.create_external_api", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.ListSpecs", + "fullName": "google.cloud.apihub.v1.ApiHub.CreateExternalApi", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "ListSpecs" + "shortName": "CreateExternalApi" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.ListSpecsRequest" + "type": "google.cloud.apihub_v1.types.CreateExternalApiRequest" }, { "name": "parent", "type": "str" }, + { + "name": "external_api", + "type": "google.cloud.apihub_v1.types.ExternalApi" + }, + { + "name": "external_api_id", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -5682,22 +970,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListSpecsPager", - "shortName": "list_specs" + "resultType": "google.cloud.apihub_v1.types.ExternalApi", + "shortName": "create_external_api" }, - "description": "Sample for ListSpecs", - "file": "apihub_v1_generated_api_hub_list_specs_sync.py", + "description": "Sample for CreateExternalApi", + "file": "apihub_v1_generated_api_hub_create_external_api_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_ListSpecs_sync", + "regionTag": "apihub_v1_generated_ApiHub_CreateExternalApi_sync", "segments": [ { - "end": 52, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 55, "start": 27, "type": "SHORT" }, @@ -5707,49 +995,56 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 52, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_list_specs_sync.py" + "title": "apihub_v1_generated_api_hub_create_external_api_sync.py" }, { 
"canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.list_versions", + "fullName": "google.cloud.apihub_v1.ApiHubClient.create_spec", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.ListVersions", + "fullName": "google.cloud.apihub.v1.ApiHub.CreateSpec", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "ListVersions" + "shortName": "CreateSpec" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.ListVersionsRequest" + "type": "google.cloud.apihub_v1.types.CreateSpecRequest" }, { "name": "parent", "type": "str" }, + { + "name": "spec", + "type": "google.cloud.apihub_v1.types.Spec" + }, + { + "name": "spec_id", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -5763,22 +1058,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListVersionsAsyncPager", - "shortName": "list_versions" + "resultType": "google.cloud.apihub_v1.types.Spec", + "shortName": "create_spec" }, - "description": "Sample for ListVersions", - "file": "apihub_v1_generated_api_hub_list_versions_async.py", + "description": "Sample for CreateSpec", + "file": "apihub_v1_generated_api_hub_create_spec_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_ListVersions_async", + "regionTag": "apihub_v1_generated_ApiHub_CreateSpec_sync", "segments": [ { - "end": 52, + "end": 57, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 57, "start": 27, "type": "SHORT" }, @@ -5788,22 +1083,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 51, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 54, + 
"start": 52, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 58, + "start": 55, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_list_versions_async.py" + "title": "apihub_v1_generated_api_hub_create_spec_sync.py" }, { "canonical": true, @@ -5812,24 +1107,32 @@ "fullName": "google.cloud.apihub_v1.ApiHubClient", "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.list_versions", + "fullName": "google.cloud.apihub_v1.ApiHubClient.create_version", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.ListVersions", + "fullName": "google.cloud.apihub.v1.ApiHub.CreateVersion", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "ListVersions" + "shortName": "CreateVersion" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.ListVersionsRequest" + "type": "google.cloud.apihub_v1.types.CreateVersionRequest" }, { "name": "parent", "type": "str" }, + { + "name": "version", + "type": "google.cloud.apihub_v1.types.Version" + }, + { + "name": "version_id", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -5843,22 +1146,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListVersionsPager", - "shortName": "list_versions" + "resultType": "google.cloud.apihub_v1.types.Version", + "shortName": "create_version" }, - "description": "Sample for ListVersions", - "file": "apihub_v1_generated_api_hub_list_versions_sync.py", + "description": "Sample for CreateVersion", + "file": "apihub_v1_generated_api_hub_create_version_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_ListVersions_sync", + "regionTag": "apihub_v1_generated_ApiHub_CreateVersion_sync", "segments": [ { - "end": 52, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 55, "start": 27, "type": "SHORT" }, @@ -5868,51 
+1171,46 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 52, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_list_versions_sync.py" + "title": "apihub_v1_generated_api_hub_create_version_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.search_resources", + "fullName": "google.cloud.apihub_v1.ApiHubClient.delete_api", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.SearchResources", + "fullName": "google.cloud.apihub.v1.ApiHub.DeleteApi", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "SearchResources" + "shortName": "DeleteApi" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.SearchResourcesRequest" - }, - { - "name": "location", - "type": "str" + "type": "google.cloud.apihub_v1.types.DeleteApiRequest" }, { - "name": "query", + "name": "name", "type": "str" }, { @@ -5928,22 +1226,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.SearchResourcesAsyncPager", - "shortName": "search_resources" + "shortName": "delete_api" }, - "description": "Sample for SearchResources", - "file": "apihub_v1_generated_api_hub_search_resources_async.py", + "description": "Sample for DeleteApi", + "file": "apihub_v1_generated_api_hub_delete_api_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_SearchResources_async", + "regionTag": "apihub_v1_generated_ApiHub_DeleteApi_sync", "segments": [ { - 
"end": 53, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 49, "start": 27, "type": "SHORT" }, @@ -5953,22 +1250,20 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 50, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_search_resources_async.py" + "title": "apihub_v1_generated_api_hub_delete_api_sync.py" }, { "canonical": true, @@ -5977,26 +1272,22 @@ "fullName": "google.cloud.apihub_v1.ApiHubClient", "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.search_resources", + "fullName": "google.cloud.apihub_v1.ApiHubClient.delete_attribute", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.SearchResources", + "fullName": "google.cloud.apihub.v1.ApiHub.DeleteAttribute", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "SearchResources" + "shortName": "DeleteAttribute" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.SearchResourcesRequest" - }, - { - "name": "location", - "type": "str" + "type": "google.cloud.apihub_v1.types.DeleteAttributeRequest" }, { - "name": "query", + "name": "name", "type": "str" }, { @@ -6012,22 +1303,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.SearchResourcesPager", - "shortName": "search_resources" + "shortName": "delete_attribute" }, - "description": "Sample for SearchResources", - "file": "apihub_v1_generated_api_hub_search_resources_sync.py", + "description": "Sample for DeleteAttribute", + "file": "apihub_v1_generated_api_hub_delete_attribute_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_SearchResources_sync", + "regionTag": "apihub_v1_generated_ApiHub_DeleteAttribute_sync", 
"segments": [ { - "end": 53, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 49, "start": 27, "type": "SHORT" }, @@ -6037,52 +1327,45 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 50, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_search_resources_sync.py" + "title": "apihub_v1_generated_api_hub_delete_attribute_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.update_api", + "fullName": "google.cloud.apihub_v1.ApiHubClient.delete_deployment", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.UpdateApi", + "fullName": "google.cloud.apihub.v1.ApiHub.DeleteDeployment", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "UpdateApi" + "shortName": "DeleteDeployment" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateApiRequest" - }, - { - "name": "api", - "type": "google.cloud.apihub_v1.types.Api" + "type": "google.cloud.apihub_v1.types.DeleteDeploymentRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "name", + "type": "str" }, { "name": "retry", @@ -6097,22 +1380,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.Api", - "shortName": "update_api" + "shortName": "delete_deployment" }, - "description": "Sample for UpdateApi", - "file": "apihub_v1_generated_api_hub_update_api_async.py", + "description": "Sample for DeleteDeployment", + "file": "apihub_v1_generated_api_hub_delete_deployment_sync.py", 
"language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_UpdateApi_async", + "regionTag": "apihub_v1_generated_ApiHub_DeleteDeployment_sync", "segments": [ { - "end": 54, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 49, "start": 27, "type": "SHORT" }, @@ -6122,22 +1404,20 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 48, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 49, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_update_api_async.py" + "title": "apihub_v1_generated_api_hub_delete_deployment_sync.py" }, { "canonical": true, @@ -6146,27 +1426,23 @@ "fullName": "google.cloud.apihub_v1.ApiHubClient", "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.update_api", + "fullName": "google.cloud.apihub_v1.ApiHubClient.delete_external_api", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.UpdateApi", + "fullName": "google.cloud.apihub.v1.ApiHub.DeleteExternalApi", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "UpdateApi" + "shortName": "DeleteExternalApi" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateApiRequest" - }, - { - "name": "api", - "type": "google.cloud.apihub_v1.types.Api" + "type": "google.cloud.apihub_v1.types.DeleteExternalApiRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "name", + "type": "str" }, { "name": "retry", @@ -6181,22 +1457,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.Api", - "shortName": "update_api" + "shortName": "delete_external_api" }, - "description": "Sample for UpdateApi", - "file": "apihub_v1_generated_api_hub_update_api_sync.py", + "description": "Sample for DeleteExternalApi", + "file": 
"apihub_v1_generated_api_hub_delete_external_api_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_UpdateApi_sync", + "regionTag": "apihub_v1_generated_ApiHub_DeleteExternalApi_sync", "segments": [ { - "end": 54, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 49, "start": 27, "type": "SHORT" }, @@ -6206,52 +1481,45 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 48, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 49, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_update_api_sync.py" + "title": "apihub_v1_generated_api_hub_delete_external_api_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.update_attribute", + "fullName": "google.cloud.apihub_v1.ApiHubClient.delete_spec", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.UpdateAttribute", + "fullName": "google.cloud.apihub.v1.ApiHub.DeleteSpec", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "UpdateAttribute" + "shortName": "DeleteSpec" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateAttributeRequest" - }, - { - "name": "attribute", - "type": "google.cloud.apihub_v1.types.Attribute" + "type": "google.cloud.apihub_v1.types.DeleteSpecRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "name", + "type": "str" }, { "name": "retry", @@ -6266,22 +1534,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.Attribute", - "shortName": "update_attribute" + 
"shortName": "delete_spec" }, - "description": "Sample for UpdateAttribute", - "file": "apihub_v1_generated_api_hub_update_attribute_async.py", + "description": "Sample for DeleteSpec", + "file": "apihub_v1_generated_api_hub_delete_spec_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_UpdateAttribute_async", + "regionTag": "apihub_v1_generated_ApiHub_DeleteSpec_sync", "segments": [ { - "end": 56, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 49, "start": 27, "type": "SHORT" }, @@ -6291,22 +1558,20 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_update_attribute_async.py" + "title": "apihub_v1_generated_api_hub_delete_spec_sync.py" }, { "canonical": true, @@ -6315,27 +1580,23 @@ "fullName": "google.cloud.apihub_v1.ApiHubClient", "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.update_attribute", + "fullName": "google.cloud.apihub_v1.ApiHubClient.delete_version", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.UpdateAttribute", + "fullName": "google.cloud.apihub.v1.ApiHub.DeleteVersion", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "UpdateAttribute" + "shortName": "DeleteVersion" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateAttributeRequest" - }, - { - "name": "attribute", - "type": "google.cloud.apihub_v1.types.Attribute" + "type": "google.cloud.apihub_v1.types.DeleteVersionRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "name", + "type": "str" }, { "name": "retry", @@ -6350,22 +1611,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": 
"google.cloud.apihub_v1.types.Attribute", - "shortName": "update_attribute" + "shortName": "delete_version" }, - "description": "Sample for UpdateAttribute", - "file": "apihub_v1_generated_api_hub_update_attribute_sync.py", + "description": "Sample for DeleteVersion", + "file": "apihub_v1_generated_api_hub_delete_version_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_UpdateAttribute_sync", + "regionTag": "apihub_v1_generated_ApiHub_DeleteVersion_sync", "segments": [ { - "end": 56, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 49, "start": 27, "type": "SHORT" }, @@ -6375,52 +1635,45 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_update_attribute_sync.py" + "title": "apihub_v1_generated_api_hub_delete_version_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.update_deployment", + "fullName": "google.cloud.apihub_v1.ApiHubClient.get_api_operation", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.UpdateDeployment", + "fullName": "google.cloud.apihub.v1.ApiHub.GetApiOperation", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "UpdateDeployment" + "shortName": "GetApiOperation" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateDeploymentRequest" - }, - { - "name": "deployment", - "type": "google.cloud.apihub_v1.types.Deployment" + "type": 
"google.cloud.apihub_v1.types.GetApiOperationRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "name", + "type": "str" }, { "name": "retry", @@ -6435,22 +1688,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.Deployment", - "shortName": "update_deployment" + "resultType": "google.cloud.apihub_v1.types.ApiOperation", + "shortName": "get_api_operation" }, - "description": "Sample for UpdateDeployment", - "file": "apihub_v1_generated_api_hub_update_deployment_async.py", + "description": "Sample for GetApiOperation", + "file": "apihub_v1_generated_api_hub_get_api_operation_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_UpdateDeployment_async", + "regionTag": "apihub_v1_generated_ApiHub_GetApiOperation_sync", "segments": [ { - "end": 58, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 58, + "end": 51, "start": 27, "type": "SHORT" }, @@ -6460,22 +1713,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 52, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 55, - "start": 53, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 59, - "start": 56, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_update_deployment_async.py" + "title": "apihub_v1_generated_api_hub_get_api_operation_sync.py" }, { "canonical": true, @@ -6484,27 +1737,23 @@ "fullName": "google.cloud.apihub_v1.ApiHubClient", "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.update_deployment", + "fullName": "google.cloud.apihub_v1.ApiHubClient.get_api", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.UpdateDeployment", + "fullName": "google.cloud.apihub.v1.ApiHub.GetApi", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "UpdateDeployment" + "shortName": "GetApi" }, "parameters": [ 
{ "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateDeploymentRequest" - }, - { - "name": "deployment", - "type": "google.cloud.apihub_v1.types.Deployment" + "type": "google.cloud.apihub_v1.types.GetApiRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "name", + "type": "str" }, { "name": "retry", @@ -6519,22 +1768,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.Deployment", - "shortName": "update_deployment" + "resultType": "google.cloud.apihub_v1.types.Api", + "shortName": "get_api" }, - "description": "Sample for UpdateDeployment", - "file": "apihub_v1_generated_api_hub_update_deployment_sync.py", + "description": "Sample for GetApi", + "file": "apihub_v1_generated_api_hub_get_api_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_UpdateDeployment_sync", + "regionTag": "apihub_v1_generated_ApiHub_GetApi_sync", "segments": [ { - "end": 58, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 58, + "end": 51, "start": 27, "type": "SHORT" }, @@ -6544,52 +1793,47 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 52, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 55, - "start": 53, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 59, - "start": 56, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_update_deployment_sync.py" + "title": "apihub_v1_generated_api_hub_get_api_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.update_external_api", + "fullName": "google.cloud.apihub_v1.ApiHubClient.get_attribute", "method": { - "fullName": 
"google.cloud.apihub.v1.ApiHub.UpdateExternalApi", + "fullName": "google.cloud.apihub.v1.ApiHub.GetAttribute", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "UpdateExternalApi" + "shortName": "GetAttribute" }, "parameters": [ { - "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateExternalApiRequest" - }, - { - "name": "external_api", - "type": "google.cloud.apihub_v1.types.ExternalApi" + "name": "request", + "type": "google.cloud.apihub_v1.types.GetAttributeRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "name", + "type": "str" }, { "name": "retry", @@ -6604,22 +1848,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.ExternalApi", - "shortName": "update_external_api" + "resultType": "google.cloud.apihub_v1.types.Attribute", + "shortName": "get_attribute" }, - "description": "Sample for UpdateExternalApi", - "file": "apihub_v1_generated_api_hub_update_external_api_async.py", + "description": "Sample for GetAttribute", + "file": "apihub_v1_generated_api_hub_get_attribute_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_UpdateExternalApi_async", + "regionTag": "apihub_v1_generated_ApiHub_GetAttribute_sync", "segments": [ { - "end": 54, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 51, "start": 27, "type": "SHORT" }, @@ -6629,22 +1873,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 48, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 49, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_update_external_api_async.py" + "title": "apihub_v1_generated_api_hub_get_attribute_sync.py" }, { "canonical": true, @@ -6653,27 +1897,23 @@ "fullName": 
"google.cloud.apihub_v1.ApiHubClient", "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.update_external_api", + "fullName": "google.cloud.apihub_v1.ApiHubClient.get_definition", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.UpdateExternalApi", + "fullName": "google.cloud.apihub.v1.ApiHub.GetDefinition", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "UpdateExternalApi" + "shortName": "GetDefinition" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateExternalApiRequest" - }, - { - "name": "external_api", - "type": "google.cloud.apihub_v1.types.ExternalApi" + "type": "google.cloud.apihub_v1.types.GetDefinitionRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "name", + "type": "str" }, { "name": "retry", @@ -6688,22 +1928,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.ExternalApi", - "shortName": "update_external_api" + "resultType": "google.cloud.apihub_v1.types.Definition", + "shortName": "get_definition" }, - "description": "Sample for UpdateExternalApi", - "file": "apihub_v1_generated_api_hub_update_external_api_sync.py", + "description": "Sample for GetDefinition", + "file": "apihub_v1_generated_api_hub_get_definition_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_UpdateExternalApi_sync", + "regionTag": "apihub_v1_generated_ApiHub_GetDefinition_sync", "segments": [ { - "end": 54, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 51, "start": 27, "type": "SHORT" }, @@ -6713,52 +1953,47 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 48, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 49, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 52, + "start": 49, "type": 
"RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_update_external_api_sync.py" + "title": "apihub_v1_generated_api_hub_get_definition_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.update_spec", + "fullName": "google.cloud.apihub_v1.ApiHubClient.get_deployment", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.UpdateSpec", + "fullName": "google.cloud.apihub.v1.ApiHub.GetDeployment", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "UpdateSpec" + "shortName": "GetDeployment" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateSpecRequest" - }, - { - "name": "spec", - "type": "google.cloud.apihub_v1.types.Spec" + "type": "google.cloud.apihub_v1.types.GetDeploymentRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "name", + "type": "str" }, { "name": "retry", @@ -6773,22 +2008,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.Spec", - "shortName": "update_spec" + "resultType": "google.cloud.apihub_v1.types.Deployment", + "shortName": "get_deployment" }, - "description": "Sample for UpdateSpec", - "file": "apihub_v1_generated_api_hub_update_spec_async.py", + "description": "Sample for GetDeployment", + "file": "apihub_v1_generated_api_hub_get_deployment_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_UpdateSpec_async", + "regionTag": "apihub_v1_generated_ApiHub_GetDeployment_sync", "segments": [ { - "end": 56, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 51, "start": 27, "type": "SHORT" }, @@ -6798,22 +2033,22 @@ "type": 
"CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_update_spec_async.py" + "title": "apihub_v1_generated_api_hub_get_deployment_sync.py" }, { "canonical": true, @@ -6822,27 +2057,23 @@ "fullName": "google.cloud.apihub_v1.ApiHubClient", "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.update_spec", + "fullName": "google.cloud.apihub_v1.ApiHubClient.get_external_api", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.UpdateSpec", + "fullName": "google.cloud.apihub.v1.ApiHub.GetExternalApi", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "UpdateSpec" + "shortName": "GetExternalApi" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateSpecRequest" - }, - { - "name": "spec", - "type": "google.cloud.apihub_v1.types.Spec" + "type": "google.cloud.apihub_v1.types.GetExternalApiRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "name", + "type": "str" }, { "name": "retry", @@ -6857,22 +2088,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.Spec", - "shortName": "update_spec" + "resultType": "google.cloud.apihub_v1.types.ExternalApi", + "shortName": "get_external_api" }, - "description": "Sample for UpdateSpec", - "file": "apihub_v1_generated_api_hub_update_spec_sync.py", + "description": "Sample for GetExternalApi", + "file": "apihub_v1_generated_api_hub_get_external_api_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_UpdateSpec_sync", + "regionTag": "apihub_v1_generated_ApiHub_GetExternalApi_sync", "segments": [ { - "end": 56, + "end": 51, 
"start": 27, "type": "FULL" }, { - "end": 56, + "end": 51, "start": 27, "type": "SHORT" }, @@ -6882,52 +2113,47 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_update_spec_sync.py" + "title": "apihub_v1_generated_api_hub_get_external_api_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient", - "shortName": "ApiHubAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubAsyncClient.update_version", + "fullName": "google.cloud.apihub_v1.ApiHubClient.get_spec_contents", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.UpdateVersion", + "fullName": "google.cloud.apihub.v1.ApiHub.GetSpecContents", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "UpdateVersion" + "shortName": "GetSpecContents" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateVersionRequest" - }, - { - "name": "version", - "type": "google.cloud.apihub_v1.types.Version" + "type": "google.cloud.apihub_v1.types.GetSpecContentsRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "name", + "type": "str" }, { "name": "retry", @@ -6942,22 +2168,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.Version", - "shortName": "update_version" + "resultType": "google.cloud.apihub_v1.types.SpecContents", + "shortName": "get_spec_contents" }, - "description": "Sample for UpdateVersion", - "file": "apihub_v1_generated_api_hub_update_version_async.py", + "description": "Sample for 
GetSpecContents", + "file": "apihub_v1_generated_api_hub_get_spec_contents_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_UpdateVersion_async", + "regionTag": "apihub_v1_generated_ApiHub_GetSpecContents_sync", "segments": [ { - "end": 54, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 51, "start": 27, "type": "SHORT" }, @@ -6967,22 +2193,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 48, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 49, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_update_version_async.py" + "title": "apihub_v1_generated_api_hub_get_spec_contents_sync.py" }, { "canonical": true, @@ -6991,27 +2217,23 @@ "fullName": "google.cloud.apihub_v1.ApiHubClient", "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ApiHubClient.update_version", + "fullName": "google.cloud.apihub_v1.ApiHubClient.get_spec", "method": { - "fullName": "google.cloud.apihub.v1.ApiHub.UpdateVersion", + "fullName": "google.cloud.apihub.v1.ApiHub.GetSpec", "service": { "fullName": "google.cloud.apihub.v1.ApiHub", "shortName": "ApiHub" }, - "shortName": "UpdateVersion" + "shortName": "GetSpec" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateVersionRequest" - }, - { - "name": "version", - "type": "google.cloud.apihub_v1.types.Version" + "type": "google.cloud.apihub_v1.types.GetSpecRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "name", + "type": "str" }, { "name": "retry", @@ -7026,22 +2248,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.Version", - "shortName": "update_version" + "resultType": "google.cloud.apihub_v1.types.Spec", + "shortName": "get_spec" }, - 
"description": "Sample for UpdateVersion", - "file": "apihub_v1_generated_api_hub_update_version_sync.py", + "description": "Sample for GetSpec", + "file": "apihub_v1_generated_api_hub_get_spec_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_ApiHub_UpdateVersion_sync", + "regionTag": "apihub_v1_generated_ApiHub_GetSpec_sync", "segments": [ { - "end": 54, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 51, "start": 27, "type": "SHORT" }, @@ -7051,55 +2273,46 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 48, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 49, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_api_hub_update_version_sync.py" + "title": "apihub_v1_generated_api_hub_get_spec_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceAsyncClient", - "shortName": "HostProjectRegistrationServiceAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceAsyncClient.create_host_project_registration", + "fullName": "google.cloud.apihub_v1.ApiHubClient.get_version", "method": { - "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService.CreateHostProjectRegistration", + "fullName": "google.cloud.apihub.v1.ApiHub.GetVersion", "service": { - "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService", - "shortName": "HostProjectRegistrationService" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "CreateHostProjectRegistration" + "shortName": "GetVersion" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.CreateHostProjectRegistrationRequest" - }, 
- { - "name": "parent", - "type": "str" - }, - { - "name": "host_project_registration", - "type": "google.cloud.apihub_v1.types.HostProjectRegistration" + "type": "google.cloud.apihub_v1.types.GetVersionRequest" }, { - "name": "host_project_registration_id", + "name": "name", "type": "str" }, { @@ -7115,22 +2328,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.HostProjectRegistration", - "shortName": "create_host_project_registration" + "resultType": "google.cloud.apihub_v1.types.Version", + "shortName": "get_version" }, - "description": "Sample for CreateHostProjectRegistration", - "file": "apihub_v1_generated_host_project_registration_service_create_host_project_registration_async.py", + "description": "Sample for GetVersion", + "file": "apihub_v1_generated_api_hub_get_version_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_HostProjectRegistrationService_CreateHostProjectRegistration_async", + "regionTag": "apihub_v1_generated_ApiHub_GetVersion_sync", "segments": [ { - "end": 56, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 51, "start": 27, "type": "SHORT" }, @@ -7140,56 +2353,48 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_host_project_registration_service_create_host_project_registration_async.py" + "title": "apihub_v1_generated_api_hub_get_version_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceClient", - "shortName": "HostProjectRegistrationServiceClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": 
"google.cloud.apihub_v1.HostProjectRegistrationServiceClient.create_host_project_registration", + "fullName": "google.cloud.apihub_v1.ApiHubClient.list_api_operations", "method": { - "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService.CreateHostProjectRegistration", + "fullName": "google.cloud.apihub.v1.ApiHub.ListApiOperations", "service": { - "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService", - "shortName": "HostProjectRegistrationService" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "CreateHostProjectRegistration" + "shortName": "ListApiOperations" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.CreateHostProjectRegistrationRequest" + "type": "google.cloud.apihub_v1.types.ListApiOperationsRequest" }, { "name": "parent", "type": "str" }, - { - "name": "host_project_registration", - "type": "google.cloud.apihub_v1.types.HostProjectRegistration" - }, - { - "name": "host_project_registration_id", - "type": "str" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -7203,22 +2408,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.HostProjectRegistration", - "shortName": "create_host_project_registration" + "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListApiOperationsPager", + "shortName": "list_api_operations" }, - "description": "Sample for CreateHostProjectRegistration", - "file": "apihub_v1_generated_host_project_registration_service_create_host_project_registration_sync.py", + "description": "Sample for ListApiOperations", + "file": "apihub_v1_generated_api_hub_list_api_operations_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_HostProjectRegistrationService_CreateHostProjectRegistration_sync", + "regionTag": "apihub_v1_generated_ApiHub_ListApiOperations_sync", "segments": [ { - "end": 56, + "end": 52, "start": 27, "type": "FULL" }, { - 
"end": 56, + "end": 52, "start": 27, "type": "SHORT" }, @@ -7228,47 +2433,46 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_host_project_registration_service_create_host_project_registration_sync.py" + "title": "apihub_v1_generated_api_hub_list_api_operations_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceAsyncClient", - "shortName": "HostProjectRegistrationServiceAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceAsyncClient.get_host_project_registration", + "fullName": "google.cloud.apihub_v1.ApiHubClient.list_apis", "method": { - "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService.GetHostProjectRegistration", + "fullName": "google.cloud.apihub.v1.ApiHub.ListApis", "service": { - "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService", - "shortName": "HostProjectRegistrationService" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "GetHostProjectRegistration" + "shortName": "ListApis" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.GetHostProjectRegistrationRequest" + "type": "google.cloud.apihub_v1.types.ListApisRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -7284,22 +2488,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.HostProjectRegistration", - "shortName": "get_host_project_registration" + "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListApisPager", + "shortName": "list_apis" }, - 
"description": "Sample for GetHostProjectRegistration", - "file": "apihub_v1_generated_host_project_registration_service_get_host_project_registration_async.py", + "description": "Sample for ListApis", + "file": "apihub_v1_generated_api_hub_list_apis_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_HostProjectRegistrationService_GetHostProjectRegistration_async", + "regionTag": "apihub_v1_generated_ApiHub_ListApis_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -7319,36 +2523,36 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_host_project_registration_service_get_host_project_registration_async.py" + "title": "apihub_v1_generated_api_hub_list_apis_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceClient", - "shortName": "HostProjectRegistrationServiceClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceClient.get_host_project_registration", + "fullName": "google.cloud.apihub_v1.ApiHubClient.list_attributes", "method": { - "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService.GetHostProjectRegistration", + "fullName": "google.cloud.apihub.v1.ApiHub.ListAttributes", "service": { - "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService", - "shortName": "HostProjectRegistrationService" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "GetHostProjectRegistration" + "shortName": "ListAttributes" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.GetHostProjectRegistrationRequest" + "type": "google.cloud.apihub_v1.types.ListAttributesRequest" }, { - "name": 
"name", + "name": "parent", "type": "str" }, { @@ -7364,22 +2568,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.HostProjectRegistration", - "shortName": "get_host_project_registration" + "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListAttributesPager", + "shortName": "list_attributes" }, - "description": "Sample for GetHostProjectRegistration", - "file": "apihub_v1_generated_host_project_registration_service_get_host_project_registration_sync.py", + "description": "Sample for ListAttributes", + "file": "apihub_v1_generated_api_hub_list_attributes_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_HostProjectRegistrationService_GetHostProjectRegistration_sync", + "regionTag": "apihub_v1_generated_ApiHub_ListAttributes_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -7399,34 +2603,33 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_host_project_registration_service_get_host_project_registration_sync.py" + "title": "apihub_v1_generated_api_hub_list_attributes_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceAsyncClient", - "shortName": "HostProjectRegistrationServiceAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceAsyncClient.list_host_project_registrations", + "fullName": "google.cloud.apihub_v1.ApiHubClient.list_deployments", "method": { - "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService.ListHostProjectRegistrations", + "fullName": "google.cloud.apihub.v1.ApiHub.ListDeployments", "service": { - "fullName": 
"google.cloud.apihub.v1.HostProjectRegistrationService", - "shortName": "HostProjectRegistrationService" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "ListHostProjectRegistrations" + "shortName": "ListDeployments" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.ListHostProjectRegistrationsRequest" + "type": "google.cloud.apihub_v1.types.ListDeploymentsRequest" }, { "name": "parent", @@ -7445,14 +2648,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.services.host_project_registration_service.pagers.ListHostProjectRegistrationsAsyncPager", - "shortName": "list_host_project_registrations" + "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListDeploymentsPager", + "shortName": "list_deployments" }, - "description": "Sample for ListHostProjectRegistrations", - "file": "apihub_v1_generated_host_project_registration_service_list_host_project_registrations_async.py", + "description": "Sample for ListDeployments", + "file": "apihub_v1_generated_api_hub_list_deployments_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_HostProjectRegistrationService_ListHostProjectRegistrations_async", + "regionTag": "apihub_v1_generated_ApiHub_ListDeployments_sync", "segments": [ { "end": 52, @@ -7485,28 +2688,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_host_project_registration_service_list_host_project_registrations_async.py" + "title": "apihub_v1_generated_api_hub_list_deployments_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceClient", - "shortName": "HostProjectRegistrationServiceClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceClient.list_host_project_registrations", + "fullName": 
"google.cloud.apihub_v1.ApiHubClient.list_external_apis", "method": { - "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService.ListHostProjectRegistrations", + "fullName": "google.cloud.apihub.v1.ApiHub.ListExternalApis", "service": { - "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService", - "shortName": "HostProjectRegistrationService" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "ListHostProjectRegistrations" + "shortName": "ListExternalApis" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.ListHostProjectRegistrationsRequest" + "type": "google.cloud.apihub_v1.types.ListExternalApisRequest" }, { "name": "parent", @@ -7525,14 +2728,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.services.host_project_registration_service.pagers.ListHostProjectRegistrationsPager", - "shortName": "list_host_project_registrations" + "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListExternalApisPager", + "shortName": "list_external_apis" }, - "description": "Sample for ListHostProjectRegistrations", - "file": "apihub_v1_generated_host_project_registration_service_list_host_project_registrations_sync.py", + "description": "Sample for ListExternalApis", + "file": "apihub_v1_generated_api_hub_list_external_apis_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_HostProjectRegistrationService_ListHostProjectRegistrations_sync", + "regionTag": "apihub_v1_generated_ApiHub_ListExternalApis_sync", "segments": [ { "end": 52, @@ -7565,32 +2768,31 @@ "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_host_project_registration_service_list_host_project_registrations_sync.py" + "title": "apihub_v1_generated_api_hub_list_external_apis_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.LintingServiceAsyncClient", - 
"shortName": "LintingServiceAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.LintingServiceAsyncClient.get_style_guide_contents", + "fullName": "google.cloud.apihub_v1.ApiHubClient.list_specs", "method": { - "fullName": "google.cloud.apihub.v1.LintingService.GetStyleGuideContents", + "fullName": "google.cloud.apihub.v1.ApiHub.ListSpecs", "service": { - "fullName": "google.cloud.apihub.v1.LintingService", - "shortName": "LintingService" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "GetStyleGuideContents" + "shortName": "ListSpecs" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.GetStyleGuideContentsRequest" + "type": "google.cloud.apihub_v1.types.ListSpecsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -7606,22 +2808,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.StyleGuideContents", - "shortName": "get_style_guide_contents" + "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListSpecsPager", + "shortName": "list_specs" }, - "description": "Sample for GetStyleGuideContents", - "file": "apihub_v1_generated_linting_service_get_style_guide_contents_async.py", + "description": "Sample for ListSpecs", + "file": "apihub_v1_generated_api_hub_list_specs_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_LintingService_GetStyleGuideContents_async", + "regionTag": "apihub_v1_generated_ApiHub_ListSpecs_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -7641,36 +2843,36 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_linting_service_get_style_guide_contents_async.py" + "title": 
"apihub_v1_generated_api_hub_list_specs_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.apihub_v1.LintingServiceClient", - "shortName": "LintingServiceClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.LintingServiceClient.get_style_guide_contents", + "fullName": "google.cloud.apihub_v1.ApiHubClient.list_versions", "method": { - "fullName": "google.cloud.apihub.v1.LintingService.GetStyleGuideContents", + "fullName": "google.cloud.apihub.v1.ApiHub.ListVersions", "service": { - "fullName": "google.cloud.apihub.v1.LintingService", - "shortName": "LintingService" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "GetStyleGuideContents" + "shortName": "ListVersions" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.GetStyleGuideContentsRequest" + "type": "google.cloud.apihub_v1.types.ListVersionsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -7686,22 +2888,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.StyleGuideContents", - "shortName": "get_style_guide_contents" + "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.ListVersionsPager", + "shortName": "list_versions" }, - "description": "Sample for GetStyleGuideContents", - "file": "apihub_v1_generated_linting_service_get_style_guide_contents_sync.py", + "description": "Sample for ListVersions", + "file": "apihub_v1_generated_api_hub_list_versions_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_LintingService_GetStyleGuideContents_sync", + "regionTag": "apihub_v1_generated_ApiHub_ListVersions_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -7721,37 +2923,40 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + 
"end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_linting_service_get_style_guide_contents_sync.py" + "title": "apihub_v1_generated_api_hub_list_versions_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.LintingServiceAsyncClient", - "shortName": "LintingServiceAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.LintingServiceAsyncClient.get_style_guide", + "fullName": "google.cloud.apihub_v1.ApiHubClient.search_resources", "method": { - "fullName": "google.cloud.apihub.v1.LintingService.GetStyleGuide", + "fullName": "google.cloud.apihub.v1.ApiHub.SearchResources", "service": { - "fullName": "google.cloud.apihub.v1.LintingService", - "shortName": "LintingService" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "GetStyleGuide" + "shortName": "SearchResources" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.GetStyleGuideRequest" + "type": "google.cloud.apihub_v1.types.SearchResourcesRequest" }, { - "name": "name", + "name": "location", + "type": "str" + }, + { + "name": "query", "type": "str" }, { @@ -7767,22 +2972,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.StyleGuide", - "shortName": "get_style_guide" + "resultType": "google.cloud.apihub_v1.services.api_hub.pagers.SearchResourcesPager", + "shortName": "search_resources" }, - "description": "Sample for GetStyleGuide", - "file": "apihub_v1_generated_linting_service_get_style_guide_async.py", + "description": "Sample for SearchResources", + "file": "apihub_v1_generated_api_hub_search_resources_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_LintingService_GetStyleGuide_async", + "regionTag": "apihub_v1_generated_ApiHub_SearchResources_sync", "segments": [ { - 
"end": 51, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 53, "start": 27, "type": "SHORT" }, @@ -7792,47 +2997,51 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 54, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_linting_service_get_style_guide_async.py" + "title": "apihub_v1_generated_api_hub_search_resources_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.apihub_v1.LintingServiceClient", - "shortName": "LintingServiceClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.LintingServiceClient.get_style_guide", + "fullName": "google.cloud.apihub_v1.ApiHubClient.update_api", "method": { - "fullName": "google.cloud.apihub.v1.LintingService.GetStyleGuide", + "fullName": "google.cloud.apihub.v1.ApiHub.UpdateApi", "service": { - "fullName": "google.cloud.apihub.v1.LintingService", - "shortName": "LintingService" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "GetStyleGuide" + "shortName": "UpdateApi" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.GetStyleGuideRequest" + "type": "google.cloud.apihub_v1.types.UpdateApiRequest" }, { - "name": "name", - "type": "str" + "name": "api", + "type": "google.cloud.apihub_v1.types.Api" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -7847,22 +3056,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.StyleGuide", - "shortName": "get_style_guide" + "resultType": "google.cloud.apihub_v1.types.Api", + "shortName": "update_api" }, - "description": "Sample for GetStyleGuide", - "file": 
"apihub_v1_generated_linting_service_get_style_guide_sync.py", + "description": "Sample for UpdateApi", + "file": "apihub_v1_generated_api_hub_update_api_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_LintingService_GetStyleGuide_sync", + "regionTag": "apihub_v1_generated_ApiHub_UpdateApi_sync", "segments": [ { - "end": 51, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 54, "start": 27, "type": "SHORT" }, @@ -7872,44 +3081,51 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_linting_service_get_style_guide_sync.py" + "title": "apihub_v1_generated_api_hub_update_api_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.LintingServiceAsyncClient", - "shortName": "LintingServiceAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.LintingServiceAsyncClient.lint_spec", + "fullName": "google.cloud.apihub_v1.ApiHubClient.update_attribute", "method": { - "fullName": "google.cloud.apihub.v1.LintingService.LintSpec", + "fullName": "google.cloud.apihub.v1.ApiHub.UpdateAttribute", "service": { - "fullName": "google.cloud.apihub.v1.LintingService", - "shortName": "LintingService" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "LintSpec" + "shortName": "UpdateAttribute" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.LintSpecRequest" + "type": "google.cloud.apihub_v1.types.UpdateAttributeRequest" + }, + { + "name": "attribute", + "type": "google.cloud.apihub_v1.types.Attribute" + }, + { + "name": "update_mask", + "type": 
"google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -7924,21 +3140,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "lint_spec" + "resultType": "google.cloud.apihub_v1.types.Attribute", + "shortName": "update_attribute" }, - "description": "Sample for LintSpec", - "file": "apihub_v1_generated_linting_service_lint_spec_async.py", + "description": "Sample for UpdateAttribute", + "file": "apihub_v1_generated_api_hub_update_attribute_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_LintingService_LintSpec_async", + "regionTag": "apihub_v1_generated_ApiHub_UpdateAttribute_sync", "segments": [ { - "end": 49, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 56, "start": 27, "type": "SHORT" }, @@ -7948,41 +3165,51 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 46, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_linting_service_lint_spec_async.py" + "title": "apihub_v1_generated_api_hub_update_attribute_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.apihub_v1.LintingServiceClient", - "shortName": "LintingServiceClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.LintingServiceClient.lint_spec", + "fullName": "google.cloud.apihub_v1.ApiHubClient.update_deployment", "method": { - "fullName": "google.cloud.apihub.v1.LintingService.LintSpec", + "fullName": "google.cloud.apihub.v1.ApiHub.UpdateDeployment", "service": { - "fullName": "google.cloud.apihub.v1.LintingService", - "shortName": "LintingService" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "LintSpec" + "shortName": "UpdateDeployment" }, "parameters": [ { "name": 
"request", - "type": "google.cloud.apihub_v1.types.LintSpecRequest" + "type": "google.cloud.apihub_v1.types.UpdateDeploymentRequest" + }, + { + "name": "deployment", + "type": "google.cloud.apihub_v1.types.Deployment" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -7997,21 +3224,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "lint_spec" + "resultType": "google.cloud.apihub_v1.types.Deployment", + "shortName": "update_deployment" }, - "description": "Sample for LintSpec", - "file": "apihub_v1_generated_linting_service_lint_spec_sync.py", + "description": "Sample for UpdateDeployment", + "file": "apihub_v1_generated_api_hub_update_deployment_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_LintingService_LintSpec_sync", + "regionTag": "apihub_v1_generated_ApiHub_UpdateDeployment_sync", "segments": [ { - "end": 49, + "end": 58, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 58, "start": 27, "type": "SHORT" }, @@ -8021,46 +3249,47 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 52, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 46, + "end": 55, + "start": 53, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 59, + "start": 56, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_linting_service_lint_spec_sync.py" + "title": "apihub_v1_generated_api_hub_update_deployment_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.LintingServiceAsyncClient", - "shortName": "LintingServiceAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.LintingServiceAsyncClient.update_style_guide", + "fullName": "google.cloud.apihub_v1.ApiHubClient.update_external_api", "method": { - "fullName": "google.cloud.apihub.v1.LintingService.UpdateStyleGuide", + "fullName": 
"google.cloud.apihub.v1.ApiHub.UpdateExternalApi", "service": { - "fullName": "google.cloud.apihub.v1.LintingService", - "shortName": "LintingService" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "UpdateStyleGuide" + "shortName": "UpdateExternalApi" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateStyleGuideRequest" + "type": "google.cloud.apihub_v1.types.UpdateExternalApiRequest" }, { - "name": "style_guide", - "type": "google.cloud.apihub_v1.types.StyleGuide" + "name": "external_api", + "type": "google.cloud.apihub_v1.types.ExternalApi" }, { "name": "update_mask", @@ -8079,22 +3308,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.StyleGuide", - "shortName": "update_style_guide" + "resultType": "google.cloud.apihub_v1.types.ExternalApi", + "shortName": "update_external_api" }, - "description": "Sample for UpdateStyleGuide", - "file": "apihub_v1_generated_linting_service_update_style_guide_async.py", + "description": "Sample for UpdateExternalApi", + "file": "apihub_v1_generated_api_hub_update_external_api_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_LintingService_UpdateStyleGuide_async", + "regionTag": "apihub_v1_generated_ApiHub_UpdateExternalApi_sync", "segments": [ { - "end": 56, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 54, "start": 27, "type": "SHORT" }, @@ -8104,47 +3333,47 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_linting_service_update_style_guide_async.py" + "title": "apihub_v1_generated_api_hub_update_external_api_sync.py" }, { "canonical": true, "clientMethod": { "client": { 
- "fullName": "google.cloud.apihub_v1.LintingServiceClient", - "shortName": "LintingServiceClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.LintingServiceClient.update_style_guide", + "fullName": "google.cloud.apihub_v1.ApiHubClient.update_spec", "method": { - "fullName": "google.cloud.apihub.v1.LintingService.UpdateStyleGuide", + "fullName": "google.cloud.apihub.v1.ApiHub.UpdateSpec", "service": { - "fullName": "google.cloud.apihub.v1.LintingService", - "shortName": "LintingService" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "UpdateStyleGuide" + "shortName": "UpdateSpec" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.UpdateStyleGuideRequest" + "type": "google.cloud.apihub_v1.types.UpdateSpecRequest" }, { - "name": "style_guide", - "type": "google.cloud.apihub_v1.types.StyleGuide" + "name": "spec", + "type": "google.cloud.apihub_v1.types.Spec" }, { "name": "update_mask", @@ -8163,14 +3392,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.StyleGuide", - "shortName": "update_style_guide" + "resultType": "google.cloud.apihub_v1.types.Spec", + "shortName": "update_spec" }, - "description": "Sample for UpdateStyleGuide", - "file": "apihub_v1_generated_linting_service_update_style_guide_sync.py", + "description": "Sample for UpdateSpec", + "file": "apihub_v1_generated_api_hub_update_spec_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_LintingService_UpdateStyleGuide_sync", + "regionTag": "apihub_v1_generated_ApiHub_UpdateSpec_sync", "segments": [ { "end": 56, @@ -8203,41 +3432,36 @@ "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_linting_service_update_style_guide_sync.py" + "title": "apihub_v1_generated_api_hub_update_spec_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - 
"fullName": "google.cloud.apihub_v1.ProvisioningAsyncClient", - "shortName": "ProvisioningAsyncClient" + "fullName": "google.cloud.apihub_v1.ApiHubClient", + "shortName": "ApiHubClient" }, - "fullName": "google.cloud.apihub_v1.ProvisioningAsyncClient.create_api_hub_instance", + "fullName": "google.cloud.apihub_v1.ApiHubClient.update_version", "method": { - "fullName": "google.cloud.apihub.v1.Provisioning.CreateApiHubInstance", + "fullName": "google.cloud.apihub.v1.ApiHub.UpdateVersion", "service": { - "fullName": "google.cloud.apihub.v1.Provisioning", - "shortName": "Provisioning" + "fullName": "google.cloud.apihub.v1.ApiHub", + "shortName": "ApiHub" }, - "shortName": "CreateApiHubInstance" + "shortName": "UpdateVersion" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.CreateApiHubInstanceRequest" - }, - { - "name": "parent", - "type": "str" + "type": "google.cloud.apihub_v1.types.UpdateVersionRequest" }, { - "name": "api_hub_instance", - "type": "google.cloud.apihub_v1.types.ApiHubInstance" + "name": "version", + "type": "google.cloud.apihub_v1.types.Version" }, { - "name": "api_hub_instance_id", - "type": "str" + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -8252,22 +3476,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_api_hub_instance" + "resultType": "google.cloud.apihub_v1.types.Version", + "shortName": "update_version" }, - "description": "Sample for CreateApiHubInstance", - "file": "apihub_v1_generated_provisioning_create_api_hub_instance_async.py", + "description": "Sample for UpdateVersion", + "file": "apihub_v1_generated_api_hub_update_version_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_Provisioning_CreateApiHubInstance_async", + "regionTag": "apihub_v1_generated_ApiHub_UpdateVersion_sync", "segments": [ { - "end": 59, + "end": 54, 
"start": 27, "type": "FULL" }, { - "end": 59, + "end": 54, "start": 27, "type": "SHORT" }, @@ -8277,54 +3501,54 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 49, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 56, - "start": 50, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 60, - "start": 57, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_provisioning_create_api_hub_instance_async.py" + "title": "apihub_v1_generated_api_hub_update_version_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.apihub_v1.ProvisioningClient", - "shortName": "ProvisioningClient" + "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceClient", + "shortName": "HostProjectRegistrationServiceClient" }, - "fullName": "google.cloud.apihub_v1.ProvisioningClient.create_api_hub_instance", + "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceClient.create_host_project_registration", "method": { - "fullName": "google.cloud.apihub.v1.Provisioning.CreateApiHubInstance", + "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService.CreateHostProjectRegistration", "service": { - "fullName": "google.cloud.apihub.v1.Provisioning", - "shortName": "Provisioning" + "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService", + "shortName": "HostProjectRegistrationService" }, - "shortName": "CreateApiHubInstance" + "shortName": "CreateHostProjectRegistration" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.CreateApiHubInstanceRequest" + "type": "google.cloud.apihub_v1.types.CreateHostProjectRegistrationRequest" }, { "name": "parent", "type": "str" }, { - "name": "api_hub_instance", - "type": "google.cloud.apihub_v1.types.ApiHubInstance" + "name": "host_project_registration", + "type": "google.cloud.apihub_v1.types.HostProjectRegistration" }, { - "name": "api_hub_instance_id", + "name": 
"host_project_registration_id", "type": "str" }, { @@ -8340,22 +3564,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_api_hub_instance" + "resultType": "google.cloud.apihub_v1.types.HostProjectRegistration", + "shortName": "create_host_project_registration" }, - "description": "Sample for CreateApiHubInstance", - "file": "apihub_v1_generated_provisioning_create_api_hub_instance_sync.py", + "description": "Sample for CreateHostProjectRegistration", + "file": "apihub_v1_generated_host_project_registration_service_create_host_project_registration_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_Provisioning_CreateApiHubInstance_sync", + "regionTag": "apihub_v1_generated_HostProjectRegistrationService_CreateHostProjectRegistration_sync", "segments": [ { - "end": 59, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 59, + "end": 56, "start": 27, "type": "SHORT" }, @@ -8365,44 +3589,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 49, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 56, - "start": 50, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 60, - "start": 57, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_provisioning_create_api_hub_instance_sync.py" + "title": "apihub_v1_generated_host_project_registration_service_create_host_project_registration_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ProvisioningAsyncClient", - "shortName": "ProvisioningAsyncClient" + "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceClient", + "shortName": "HostProjectRegistrationServiceClient" }, - "fullName": "google.cloud.apihub_v1.ProvisioningAsyncClient.get_api_hub_instance", + "fullName": 
"google.cloud.apihub_v1.HostProjectRegistrationServiceClient.get_host_project_registration", "method": { - "fullName": "google.cloud.apihub.v1.Provisioning.GetApiHubInstance", + "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService.GetHostProjectRegistration", "service": { - "fullName": "google.cloud.apihub.v1.Provisioning", - "shortName": "Provisioning" + "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService", + "shortName": "HostProjectRegistrationService" }, - "shortName": "GetApiHubInstance" + "shortName": "GetHostProjectRegistration" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.GetApiHubInstanceRequest" + "type": "google.cloud.apihub_v1.types.GetHostProjectRegistrationRequest" }, { "name": "name", @@ -8421,14 +3644,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.ApiHubInstance", - "shortName": "get_api_hub_instance" + "resultType": "google.cloud.apihub_v1.types.HostProjectRegistration", + "shortName": "get_host_project_registration" }, - "description": "Sample for GetApiHubInstance", - "file": "apihub_v1_generated_provisioning_get_api_hub_instance_async.py", + "description": "Sample for GetHostProjectRegistration", + "file": "apihub_v1_generated_host_project_registration_service_get_host_project_registration_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_Provisioning_GetApiHubInstance_async", + "regionTag": "apihub_v1_generated_HostProjectRegistrationService_GetHostProjectRegistration_sync", "segments": [ { "end": 51, @@ -8461,31 +3684,31 @@ "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_provisioning_get_api_hub_instance_async.py" + "title": "apihub_v1_generated_host_project_registration_service_get_host_project_registration_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.apihub_v1.ProvisioningClient", - "shortName": "ProvisioningClient" + 
"fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceClient", + "shortName": "HostProjectRegistrationServiceClient" }, - "fullName": "google.cloud.apihub_v1.ProvisioningClient.get_api_hub_instance", + "fullName": "google.cloud.apihub_v1.HostProjectRegistrationServiceClient.list_host_project_registrations", "method": { - "fullName": "google.cloud.apihub.v1.Provisioning.GetApiHubInstance", + "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService.ListHostProjectRegistrations", "service": { - "fullName": "google.cloud.apihub.v1.Provisioning", - "shortName": "Provisioning" + "fullName": "google.cloud.apihub.v1.HostProjectRegistrationService", + "shortName": "HostProjectRegistrationService" }, - "shortName": "GetApiHubInstance" + "shortName": "ListHostProjectRegistrations" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.GetApiHubInstanceRequest" + "type": "google.cloud.apihub_v1.types.ListHostProjectRegistrationsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -8501,22 +3724,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.ApiHubInstance", - "shortName": "get_api_hub_instance" + "resultType": "google.cloud.apihub_v1.services.host_project_registration_service.pagers.ListHostProjectRegistrationsPager", + "shortName": "list_host_project_registrations" }, - "description": "Sample for GetApiHubInstance", - "file": "apihub_v1_generated_provisioning_get_api_hub_instance_sync.py", + "description": "Sample for ListHostProjectRegistrations", + "file": "apihub_v1_generated_host_project_registration_service_list_host_project_registrations_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_Provisioning_GetApiHubInstance_sync", + "regionTag": "apihub_v1_generated_HostProjectRegistrationService_ListHostProjectRegistrations_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + 
"end": 52, "start": 27, "type": "SHORT" }, @@ -8536,37 +3759,36 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_provisioning_get_api_hub_instance_sync.py" + "title": "apihub_v1_generated_host_project_registration_service_list_host_project_registrations_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.ProvisioningAsyncClient", - "shortName": "ProvisioningAsyncClient" + "fullName": "google.cloud.apihub_v1.LintingServiceClient", + "shortName": "LintingServiceClient" }, - "fullName": "google.cloud.apihub_v1.ProvisioningAsyncClient.lookup_api_hub_instance", + "fullName": "google.cloud.apihub_v1.LintingServiceClient.get_style_guide_contents", "method": { - "fullName": "google.cloud.apihub.v1.Provisioning.LookupApiHubInstance", + "fullName": "google.cloud.apihub.v1.LintingService.GetStyleGuideContents", "service": { - "fullName": "google.cloud.apihub.v1.Provisioning", - "shortName": "Provisioning" + "fullName": "google.cloud.apihub.v1.LintingService", + "shortName": "LintingService" }, - "shortName": "LookupApiHubInstance" + "shortName": "GetStyleGuideContents" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.LookupApiHubInstanceRequest" + "type": "google.cloud.apihub_v1.types.GetStyleGuideContentsRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -8582,14 +3804,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.LookupApiHubInstanceResponse", - "shortName": "lookup_api_hub_instance" + "resultType": "google.cloud.apihub_v1.types.StyleGuideContents", + "shortName": "get_style_guide_contents" }, - "description": "Sample for LookupApiHubInstance", - "file": "apihub_v1_generated_provisioning_lookup_api_hub_instance_async.py", + "description": "Sample for GetStyleGuideContents", + "file": 
"apihub_v1_generated_linting_service_get_style_guide_contents_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_Provisioning_LookupApiHubInstance_async", + "regionTag": "apihub_v1_generated_LintingService_GetStyleGuideContents_sync", "segments": [ { "end": 51, @@ -8622,31 +3844,31 @@ "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_provisioning_lookup_api_hub_instance_async.py" + "title": "apihub_v1_generated_linting_service_get_style_guide_contents_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.apihub_v1.ProvisioningClient", - "shortName": "ProvisioningClient" + "fullName": "google.cloud.apihub_v1.LintingServiceClient", + "shortName": "LintingServiceClient" }, - "fullName": "google.cloud.apihub_v1.ProvisioningClient.lookup_api_hub_instance", + "fullName": "google.cloud.apihub_v1.LintingServiceClient.get_style_guide", "method": { - "fullName": "google.cloud.apihub.v1.Provisioning.LookupApiHubInstance", + "fullName": "google.cloud.apihub.v1.LintingService.GetStyleGuide", "service": { - "fullName": "google.cloud.apihub.v1.Provisioning", - "shortName": "Provisioning" + "fullName": "google.cloud.apihub.v1.LintingService", + "shortName": "LintingService" }, - "shortName": "LookupApiHubInstance" + "shortName": "GetStyleGuide" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.LookupApiHubInstanceRequest" + "type": "google.cloud.apihub_v1.types.GetStyleGuideRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -8662,14 +3884,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.LookupApiHubInstanceResponse", - "shortName": "lookup_api_hub_instance" + "resultType": "google.cloud.apihub_v1.types.StyleGuide", + "shortName": "get_style_guide" }, - "description": "Sample for LookupApiHubInstance", - "file": "apihub_v1_generated_provisioning_lookup_api_hub_instance_sync.py", + 
"description": "Sample for GetStyleGuide", + "file": "apihub_v1_generated_linting_service_get_style_guide_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_Provisioning_LookupApiHubInstance_sync", + "regionTag": "apihub_v1_generated_LintingService_GetStyleGuide_sync", "segments": [ { "end": 51, @@ -8702,41 +3924,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_provisioning_lookup_api_hub_instance_sync.py" + "title": "apihub_v1_generated_linting_service_get_style_guide_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceAsyncClient", - "shortName": "RuntimeProjectAttachmentServiceAsyncClient" + "fullName": "google.cloud.apihub_v1.LintingServiceClient", + "shortName": "LintingServiceClient" }, - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceAsyncClient.create_runtime_project_attachment", + "fullName": "google.cloud.apihub_v1.LintingServiceClient.lint_spec", "method": { - "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService.CreateRuntimeProjectAttachment", + "fullName": "google.cloud.apihub.v1.LintingService.LintSpec", "service": { - "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService", - "shortName": "RuntimeProjectAttachmentService" + "fullName": "google.cloud.apihub.v1.LintingService", + "shortName": "LintingService" }, - "shortName": "CreateRuntimeProjectAttachment" + "shortName": "LintSpec" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.CreateRuntimeProjectAttachmentRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "runtime_project_attachment", - "type": "google.cloud.apihub_v1.types.RuntimeProjectAttachment" - }, - { - "name": "runtime_project_attachment_id", - "type": "str" + "type": "google.cloud.apihub_v1.types.LintSpecRequest" }, { "name": "retry", @@ -8751,22 +3960,21 @@ "type": 
"Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.RuntimeProjectAttachment", - "shortName": "create_runtime_project_attachment" + "shortName": "lint_spec" }, - "description": "Sample for CreateRuntimeProjectAttachment", - "file": "apihub_v1_generated_runtime_project_attachment_service_create_runtime_project_attachment_async.py", + "description": "Sample for LintSpec", + "file": "apihub_v1_generated_linting_service_lint_spec_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_RuntimeProjectAttachmentService_CreateRuntimeProjectAttachment_async", + "regionTag": "apihub_v1_generated_LintingService_LintSpec_sync", "segments": [ { - "end": 56, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 49, "start": 27, "type": "SHORT" }, @@ -8776,55 +3984,49 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_runtime_project_attachment_service_create_runtime_project_attachment_async.py" + "title": "apihub_v1_generated_linting_service_lint_spec_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient", - "shortName": "RuntimeProjectAttachmentServiceClient" + "fullName": "google.cloud.apihub_v1.LintingServiceClient", + "shortName": "LintingServiceClient" }, - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient.create_runtime_project_attachment", + "fullName": "google.cloud.apihub_v1.LintingServiceClient.update_style_guide", "method": { - "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService.CreateRuntimeProjectAttachment", + "fullName": "google.cloud.apihub.v1.LintingService.UpdateStyleGuide", "service": { - "fullName": 
"google.cloud.apihub.v1.RuntimeProjectAttachmentService", - "shortName": "RuntimeProjectAttachmentService" + "fullName": "google.cloud.apihub.v1.LintingService", + "shortName": "LintingService" }, - "shortName": "CreateRuntimeProjectAttachment" + "shortName": "UpdateStyleGuide" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.CreateRuntimeProjectAttachmentRequest" - }, - { - "name": "parent", - "type": "str" + "type": "google.cloud.apihub_v1.types.UpdateStyleGuideRequest" }, { - "name": "runtime_project_attachment", - "type": "google.cloud.apihub_v1.types.RuntimeProjectAttachment" + "name": "style_guide", + "type": "google.cloud.apihub_v1.types.StyleGuide" }, { - "name": "runtime_project_attachment_id", - "type": "str" + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -8839,14 +4041,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.RuntimeProjectAttachment", - "shortName": "create_runtime_project_attachment" + "resultType": "google.cloud.apihub_v1.types.StyleGuide", + "shortName": "update_style_guide" }, - "description": "Sample for CreateRuntimeProjectAttachment", - "file": "apihub_v1_generated_runtime_project_attachment_service_create_runtime_project_attachment_sync.py", + "description": "Sample for UpdateStyleGuide", + "file": "apihub_v1_generated_linting_service_update_style_guide_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_RuntimeProjectAttachmentService_CreateRuntimeProjectAttachment_sync", + "regionTag": "apihub_v1_generated_LintingService_UpdateStyleGuide_sync", "segments": [ { "end": 56, @@ -8879,32 +4081,39 @@ "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_runtime_project_attachment_service_create_runtime_project_attachment_sync.py" + "title": "apihub_v1_generated_linting_service_update_style_guide_sync.py" }, { "canonical": true, "clientMethod": { - "async": 
true, "client": { - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceAsyncClient", - "shortName": "RuntimeProjectAttachmentServiceAsyncClient" + "fullName": "google.cloud.apihub_v1.ProvisioningClient", + "shortName": "ProvisioningClient" }, - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceAsyncClient.delete_runtime_project_attachment", + "fullName": "google.cloud.apihub_v1.ProvisioningClient.create_api_hub_instance", "method": { - "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService.DeleteRuntimeProjectAttachment", + "fullName": "google.cloud.apihub.v1.Provisioning.CreateApiHubInstance", "service": { - "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService", - "shortName": "RuntimeProjectAttachmentService" + "fullName": "google.cloud.apihub.v1.Provisioning", + "shortName": "Provisioning" }, - "shortName": "DeleteRuntimeProjectAttachment" + "shortName": "CreateApiHubInstance" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteRuntimeProjectAttachmentRequest" + "type": "google.cloud.apihub_v1.types.CreateApiHubInstanceRequest" }, { - "name": "name", + "name": "parent", + "type": "str" + }, + { + "name": "api_hub_instance", + "type": "google.cloud.apihub_v1.types.ApiHubInstance" + }, + { + "name": "api_hub_instance_id", "type": "str" }, { @@ -8920,21 +4129,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_runtime_project_attachment" + "resultType": "google.api_core.operation.Operation", + "shortName": "create_api_hub_instance" }, - "description": "Sample for DeleteRuntimeProjectAttachment", - "file": "apihub_v1_generated_runtime_project_attachment_service_delete_runtime_project_attachment_async.py", + "description": "Sample for CreateApiHubInstance", + "file": "apihub_v1_generated_provisioning_create_api_hub_instance_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"apihub_v1_generated_RuntimeProjectAttachmentService_DeleteRuntimeProjectAttachment_async", + "regionTag": "apihub_v1_generated_Provisioning_CreateApiHubInstance_sync", "segments": [ { - "end": 49, + "end": 59, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 59, "start": 27, "type": "SHORT" }, @@ -8944,41 +4154,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 46, + "end": 56, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 60, + "start": 57, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_runtime_project_attachment_service_delete_runtime_project_attachment_async.py" + "title": "apihub_v1_generated_provisioning_create_api_hub_instance_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient", - "shortName": "RuntimeProjectAttachmentServiceClient" + "fullName": "google.cloud.apihub_v1.ProvisioningClient", + "shortName": "ProvisioningClient" }, - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient.delete_runtime_project_attachment", + "fullName": "google.cloud.apihub_v1.ProvisioningClient.get_api_hub_instance", "method": { - "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService.DeleteRuntimeProjectAttachment", + "fullName": "google.cloud.apihub.v1.Provisioning.GetApiHubInstance", "service": { - "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService", - "shortName": "RuntimeProjectAttachmentService" + "fullName": "google.cloud.apihub.v1.Provisioning", + "shortName": "Provisioning" }, - "shortName": "DeleteRuntimeProjectAttachment" + "shortName": "GetApiHubInstance" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.DeleteRuntimeProjectAttachmentRequest" + "type": "google.cloud.apihub_v1.types.GetApiHubInstanceRequest" }, { "name": "name", @@ -8997,21 +4209,22 @@ "type": 
"Sequence[Tuple[str, str]" } ], - "shortName": "delete_runtime_project_attachment" + "resultType": "google.cloud.apihub_v1.types.ApiHubInstance", + "shortName": "get_api_hub_instance" }, - "description": "Sample for DeleteRuntimeProjectAttachment", - "file": "apihub_v1_generated_runtime_project_attachment_service_delete_runtime_project_attachment_sync.py", + "description": "Sample for GetApiHubInstance", + "file": "apihub_v1_generated_provisioning_get_api_hub_instance_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_RuntimeProjectAttachmentService_DeleteRuntimeProjectAttachment_sync", + "regionTag": "apihub_v1_generated_Provisioning_GetApiHubInstance_sync", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -9026,40 +4239,41 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_runtime_project_attachment_service_delete_runtime_project_attachment_sync.py" + "title": "apihub_v1_generated_provisioning_get_api_hub_instance_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceAsyncClient", - "shortName": "RuntimeProjectAttachmentServiceAsyncClient" + "fullName": "google.cloud.apihub_v1.ProvisioningClient", + "shortName": "ProvisioningClient" }, - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceAsyncClient.get_runtime_project_attachment", + "fullName": "google.cloud.apihub_v1.ProvisioningClient.lookup_api_hub_instance", "method": { - "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService.GetRuntimeProjectAttachment", + "fullName": "google.cloud.apihub.v1.Provisioning.LookupApiHubInstance", "service": { - "fullName": 
"google.cloud.apihub.v1.RuntimeProjectAttachmentService", - "shortName": "RuntimeProjectAttachmentService" + "fullName": "google.cloud.apihub.v1.Provisioning", + "shortName": "Provisioning" }, - "shortName": "GetRuntimeProjectAttachment" + "shortName": "LookupApiHubInstance" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.GetRuntimeProjectAttachmentRequest" + "type": "google.cloud.apihub_v1.types.LookupApiHubInstanceRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -9075,14 +4289,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.RuntimeProjectAttachment", - "shortName": "get_runtime_project_attachment" + "resultType": "google.cloud.apihub_v1.types.LookupApiHubInstanceResponse", + "shortName": "lookup_api_hub_instance" }, - "description": "Sample for GetRuntimeProjectAttachment", - "file": "apihub_v1_generated_runtime_project_attachment_service_get_runtime_project_attachment_async.py", + "description": "Sample for LookupApiHubInstance", + "file": "apihub_v1_generated_provisioning_lookup_api_hub_instance_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_RuntimeProjectAttachmentService_GetRuntimeProjectAttachment_async", + "regionTag": "apihub_v1_generated_Provisioning_LookupApiHubInstance_sync", "segments": [ { "end": 51, @@ -9115,7 +4329,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_runtime_project_attachment_service_get_runtime_project_attachment_async.py" + "title": "apihub_v1_generated_provisioning_lookup_api_hub_instance_sync.py" }, { "canonical": true, @@ -9124,22 +4338,30 @@ "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient", "shortName": "RuntimeProjectAttachmentServiceClient" }, - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient.get_runtime_project_attachment", + "fullName": 
"google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient.create_runtime_project_attachment", "method": { - "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService.GetRuntimeProjectAttachment", + "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService.CreateRuntimeProjectAttachment", "service": { "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService", "shortName": "RuntimeProjectAttachmentService" }, - "shortName": "GetRuntimeProjectAttachment" + "shortName": "CreateRuntimeProjectAttachment" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.GetRuntimeProjectAttachmentRequest" + "type": "google.cloud.apihub_v1.types.CreateRuntimeProjectAttachmentRequest" }, { - "name": "name", + "name": "parent", + "type": "str" + }, + { + "name": "runtime_project_attachment", + "type": "google.cloud.apihub_v1.types.RuntimeProjectAttachment" + }, + { + "name": "runtime_project_attachment_id", "type": "str" }, { @@ -9156,21 +4378,21 @@ } ], "resultType": "google.cloud.apihub_v1.types.RuntimeProjectAttachment", - "shortName": "get_runtime_project_attachment" + "shortName": "create_runtime_project_attachment" }, - "description": "Sample for GetRuntimeProjectAttachment", - "file": "apihub_v1_generated_runtime_project_attachment_service_get_runtime_project_attachment_sync.py", + "description": "Sample for CreateRuntimeProjectAttachment", + "file": "apihub_v1_generated_runtime_project_attachment_service_create_runtime_project_attachment_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_RuntimeProjectAttachmentService_GetRuntimeProjectAttachment_sync", + "regionTag": "apihub_v1_generated_RuntimeProjectAttachmentService_CreateRuntimeProjectAttachment_sync", "segments": [ { - "end": 51, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 56, "start": 27, "type": "SHORT" }, @@ -9180,47 +4402,46 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 50, 
"start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_runtime_project_attachment_service_get_runtime_project_attachment_sync.py" + "title": "apihub_v1_generated_runtime_project_attachment_service_create_runtime_project_attachment_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceAsyncClient", - "shortName": "RuntimeProjectAttachmentServiceAsyncClient" + "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient", + "shortName": "RuntimeProjectAttachmentServiceClient" }, - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceAsyncClient.list_runtime_project_attachments", + "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient.delete_runtime_project_attachment", "method": { - "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService.ListRuntimeProjectAttachments", + "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService.DeleteRuntimeProjectAttachment", "service": { "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService", "shortName": "RuntimeProjectAttachmentService" }, - "shortName": "ListRuntimeProjectAttachments" + "shortName": "DeleteRuntimeProjectAttachment" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.ListRuntimeProjectAttachmentsRequest" + "type": "google.cloud.apihub_v1.types.DeleteRuntimeProjectAttachmentRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -9236,22 +4457,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.services.runtime_project_attachment_service.pagers.ListRuntimeProjectAttachmentsAsyncPager", - "shortName": "list_runtime_project_attachments" + "shortName": 
"delete_runtime_project_attachment" }, - "description": "Sample for ListRuntimeProjectAttachments", - "file": "apihub_v1_generated_runtime_project_attachment_service_list_runtime_project_attachments_async.py", + "description": "Sample for DeleteRuntimeProjectAttachment", + "file": "apihub_v1_generated_runtime_project_attachment_service_delete_runtime_project_attachment_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_RuntimeProjectAttachmentService_ListRuntimeProjectAttachments_async", + "regionTag": "apihub_v1_generated_RuntimeProjectAttachmentService_DeleteRuntimeProjectAttachment_sync", "segments": [ { - "end": 52, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 49, "start": 27, "type": "SHORT" }, @@ -9266,17 +4486,15 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_runtime_project_attachment_service_list_runtime_project_attachments_async.py" + "title": "apihub_v1_generated_runtime_project_attachment_service_delete_runtime_project_attachment_sync.py" }, { "canonical": true, @@ -9285,22 +4503,22 @@ "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient", "shortName": "RuntimeProjectAttachmentServiceClient" }, - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient.list_runtime_project_attachments", + "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient.get_runtime_project_attachment", "method": { - "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService.ListRuntimeProjectAttachments", + "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService.GetRuntimeProjectAttachment", "service": { "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService", "shortName": "RuntimeProjectAttachmentService" }, - "shortName": "ListRuntimeProjectAttachments" + 
"shortName": "GetRuntimeProjectAttachment" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.ListRuntimeProjectAttachmentsRequest" + "type": "google.cloud.apihub_v1.types.GetRuntimeProjectAttachmentRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -9316,22 +4534,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.services.runtime_project_attachment_service.pagers.ListRuntimeProjectAttachmentsPager", - "shortName": "list_runtime_project_attachments" + "resultType": "google.cloud.apihub_v1.types.RuntimeProjectAttachment", + "shortName": "get_runtime_project_attachment" }, - "description": "Sample for ListRuntimeProjectAttachments", - "file": "apihub_v1_generated_runtime_project_attachment_service_list_runtime_project_attachments_sync.py", + "description": "Sample for GetRuntimeProjectAttachment", + "file": "apihub_v1_generated_runtime_project_attachment_service_get_runtime_project_attachment_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_RuntimeProjectAttachmentService_ListRuntimeProjectAttachments_sync", + "regionTag": "apihub_v1_generated_RuntimeProjectAttachmentService_GetRuntimeProjectAttachment_sync", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -9351,37 +4569,36 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_runtime_project_attachment_service_list_runtime_project_attachments_sync.py" + "title": "apihub_v1_generated_runtime_project_attachment_service_get_runtime_project_attachment_sync.py" }, { "canonical": true, "clientMethod": { - "async": true, "client": { - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceAsyncClient", - "shortName": "RuntimeProjectAttachmentServiceAsyncClient" + "fullName": 
"google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient", + "shortName": "RuntimeProjectAttachmentServiceClient" }, - "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceAsyncClient.lookup_runtime_project_attachment", + "fullName": "google.cloud.apihub_v1.RuntimeProjectAttachmentServiceClient.list_runtime_project_attachments", "method": { - "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService.LookupRuntimeProjectAttachment", + "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService.ListRuntimeProjectAttachments", "service": { "fullName": "google.cloud.apihub.v1.RuntimeProjectAttachmentService", "shortName": "RuntimeProjectAttachmentService" }, - "shortName": "LookupRuntimeProjectAttachment" + "shortName": "ListRuntimeProjectAttachments" }, "parameters": [ { "name": "request", - "type": "google.cloud.apihub_v1.types.LookupRuntimeProjectAttachmentRequest" + "type": "google.cloud.apihub_v1.types.ListRuntimeProjectAttachmentsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -9397,22 +4614,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.apihub_v1.types.LookupRuntimeProjectAttachmentResponse", - "shortName": "lookup_runtime_project_attachment" + "resultType": "google.cloud.apihub_v1.services.runtime_project_attachment_service.pagers.ListRuntimeProjectAttachmentsPager", + "shortName": "list_runtime_project_attachments" }, - "description": "Sample for LookupRuntimeProjectAttachment", - "file": "apihub_v1_generated_runtime_project_attachment_service_lookup_runtime_project_attachment_async.py", + "description": "Sample for ListRuntimeProjectAttachments", + "file": "apihub_v1_generated_runtime_project_attachment_service_list_runtime_project_attachments_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "apihub_v1_generated_RuntimeProjectAttachmentService_LookupRuntimeProjectAttachment_async", + "regionTag": 
"apihub_v1_generated_RuntimeProjectAttachmentService_ListRuntimeProjectAttachments_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -9432,12 +4649,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "apihub_v1_generated_runtime_project_attachment_service_lookup_runtime_project_attachment_async.py" + "title": "apihub_v1_generated_runtime_project_attachment_service_list_runtime_project_attachments_sync.py" }, { "canonical": true, diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub.py index aaec1e3055f6..5f7c71ab5ffb 100644 --- a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub.py +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub.py @@ -47,12 +47,7 @@ from requests import PreparedRequest, Request, Response from requests.sessions import Session -from google.cloud.apihub_v1.services.api_hub import ( - ApiHubAsyncClient, - ApiHubClient, - pagers, - transports, -) +from google.cloud.apihub_v1.services.api_hub import ApiHubClient, pagers, transports from google.cloud.apihub_v1.types import apihub_service, common_fields @@ -180,11 +175,6 @@ def test__get_client_cert_source(): "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ApiHubClient), ) -@mock.patch.object( - ApiHubAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ApiHubAsyncClient), -) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() @@ -266,7 +256,6 @@ def test__get_universe_domain(): @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (ApiHubClient, transports.ApiHubGrpcTransport, "grpc"), (ApiHubClient, transports.ApiHubRestTransport, "rest"), ], ) @@ -346,8 +335,6 @@ def 
test__validate_universe_domain(client_class, transport_class, transport_name @pytest.mark.parametrize( "client_class,transport_name", [ - (ApiHubClient, "grpc"), - (ApiHubAsyncClient, "grpc_asyncio"), (ApiHubClient, "rest"), ], ) @@ -372,8 +359,6 @@ def test_api_hub_client_from_service_account_info(client_class, transport_name): @pytest.mark.parametrize( "transport_class,transport_name", [ - (transports.ApiHubGrpcTransport, "grpc"), - (transports.ApiHubGrpcAsyncIOTransport, "grpc_asyncio"), (transports.ApiHubRestTransport, "rest"), ], ) @@ -396,8 +381,6 @@ def test_api_hub_client_service_account_always_use_jwt(transport_class, transpor @pytest.mark.parametrize( "client_class,transport_name", [ - (ApiHubClient, "grpc"), - (ApiHubAsyncClient, "grpc_asyncio"), (ApiHubClient, "rest"), ], ) @@ -429,20 +412,17 @@ def test_api_hub_client_from_service_account_file(client_class, transport_name): def test_api_hub_client_get_transport_class(): transport = ApiHubClient.get_transport_class() available_transports = [ - transports.ApiHubGrpcTransport, transports.ApiHubRestTransport, ] assert transport in available_transports - transport = ApiHubClient.get_transport_class("grpc") - assert transport == transports.ApiHubGrpcTransport + transport = ApiHubClient.get_transport_class("rest") + assert transport == transports.ApiHubRestTransport @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (ApiHubClient, transports.ApiHubGrpcTransport, "grpc"), - (ApiHubAsyncClient, transports.ApiHubGrpcAsyncIOTransport, "grpc_asyncio"), (ApiHubClient, transports.ApiHubRestTransport, "rest"), ], ) @@ -451,11 +431,6 @@ def test_api_hub_client_get_transport_class(): "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ApiHubClient), ) -@mock.patch.object( - ApiHubAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ApiHubAsyncClient), -) def test_api_hub_client_client_options(client_class, transport_class, transport_name): # Check that if 
channel is provided we won't create a new one. with mock.patch.object(ApiHubClient, "get_transport_class") as gtc: @@ -587,20 +562,6 @@ def test_api_hub_client_client_options(client_class, transport_class, transport_ @pytest.mark.parametrize( "client_class,transport_class,transport_name,use_client_cert_env", [ - (ApiHubClient, transports.ApiHubGrpcTransport, "grpc", "true"), - ( - ApiHubAsyncClient, - transports.ApiHubGrpcAsyncIOTransport, - "grpc_asyncio", - "true", - ), - (ApiHubClient, transports.ApiHubGrpcTransport, "grpc", "false"), - ( - ApiHubAsyncClient, - transports.ApiHubGrpcAsyncIOTransport, - "grpc_asyncio", - "false", - ), (ApiHubClient, transports.ApiHubRestTransport, "rest", "true"), (ApiHubClient, transports.ApiHubRestTransport, "rest", "false"), ], @@ -610,11 +571,6 @@ def test_api_hub_client_client_options(client_class, transport_class, transport_ "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ApiHubClient), ) -@mock.patch.object( - ApiHubAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ApiHubAsyncClient), -) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_api_hub_client_mtls_env_auto( client_class, transport_class, transport_name, use_client_cert_env @@ -718,13 +674,10 @@ def test_api_hub_client_mtls_env_auto( ) -@pytest.mark.parametrize("client_class", [ApiHubClient, ApiHubAsyncClient]) +@pytest.mark.parametrize("client_class", [ApiHubClient]) @mock.patch.object( ApiHubClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ApiHubClient) ) -@mock.patch.object( - ApiHubAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ApiHubAsyncClient) -) def test_api_hub_client_get_mtls_endpoint_and_cert_source(client_class): mock_client_cert_source = mock.Mock() @@ -816,17 +769,12 @@ def test_api_hub_client_get_mtls_endpoint_and_cert_source(client_class): ) -@pytest.mark.parametrize("client_class", [ApiHubClient, ApiHubAsyncClient]) +@pytest.mark.parametrize("client_class", 
[ApiHubClient]) @mock.patch.object( ApiHubClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ApiHubClient), ) -@mock.patch.object( - ApiHubAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ApiHubAsyncClient), -) def test_api_hub_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback api_override = "foo.com" @@ -903,8 +851,6 @@ def test_api_hub_client_client_api_endpoint(client_class): @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (ApiHubClient, transports.ApiHubGrpcTransport, "grpc"), - (ApiHubAsyncClient, transports.ApiHubGrpcAsyncIOTransport, "grpc_asyncio"), (ApiHubClient, transports.ApiHubRestTransport, "rest"), ], ) @@ -936,13 +882,6 @@ def test_api_hub_client_client_options_scopes( @pytest.mark.parametrize( "client_class,transport_class,transport_name,grpc_helpers", [ - (ApiHubClient, transports.ApiHubGrpcTransport, "grpc", grpc_helpers), - ( - ApiHubAsyncClient, - transports.ApiHubGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), (ApiHubClient, transports.ApiHubRestTransport, "rest", None), ], ) @@ -970,89 +909,6 @@ def test_api_hub_client_client_options_credentials_file( ) -def test_api_hub_client_client_options_from_dict(): - with mock.patch( - "google.cloud.apihub_v1.services.api_hub.transports.ApiHubGrpcTransport.__init__" - ) as grpc_transport: - grpc_transport.return_value = None - client = ApiHubClient(client_options={"api_endpoint": "squid.clam.whelk"}) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name,grpc_helpers", - [ - (ApiHubClient, transports.ApiHubGrpcTransport, "grpc", grpc_helpers), - ( 
- ApiHubAsyncClient, - transports.ApiHubGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), - ], -) -def test_api_hub_client_create_channel_credentials_file( - client_class, transport_class, transport_name, grpc_helpers -): - # Check the case credentials file is provided. - options = client_options.ClientOptions(credentials_file="credentials.json") - - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "apihub.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=None, - default_host="apihub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - @pytest.mark.parametrize( "request_type", [ @@ -1060,33 +916,135 @@ def 
test_api_hub_client_create_channel_credentials_file( dict, ], ) -def test_create_api(request_type, transport: str = "grpc"): +def test_create_api_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["api"] = { + "name": "name_value", + "display_name": "display_name_value", + "description": "description_value", + "documentation": {"external_uri": "external_uri_value"}, + "owner": {"display_name": "display_name_value", "email": "email_value"}, + "versions": ["versions_value1", "versions_value2"], + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "target_user": { + "enum_values": { + "values": [ + { + "id": "id_value", + "display_name": "display_name_value", + "description": "description_value", + "immutable": True, + } + ] + }, + "string_values": {"values": ["values_value1", "values_value2"]}, + "json_values": {}, + "attribute": "attribute_value", + }, + "team": {}, + "business_unit": {}, + "maturity_level": {}, + "attributes": {}, + "api_style": {}, + "selected_version": "selected_version_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.CreateApiRequest.meta.fields["api"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. 
+ # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["api"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["api"][field])): + del 
request_init["api"][field][i][subfield] + else: + del request_init["api"][field][subfield] + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Api( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Api( name="name_value", display_name="display_name_value", description="description_value", versions=["versions_value"], selected_version="selected_version_value", ) - response = client.create_api(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.CreateApiRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Api.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_api(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.Api) @@ -1097,62 +1055,13 @@ def test_create_api(request_type, transport: str = "grpc"): assert response.selected_version == "selected_version_value" -def test_create_api_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_api), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateApiRequest() - - -def test_create_api_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.CreateApiRequest( - parent="parent_value", - api_id="api_id_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_api), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.create_api(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateApiRequest( - parent="parent_value", - api_id="api_id_value", - ) - - -def test_create_api_use_cached_wrapped_rpc(): +def test_create_api_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1168,6 +1077,7 @@ def test_create_api_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.create_api] = mock_rpc + request = {} client.create_api(request) @@ -1181,272 +1091,228 @@ def test_create_api_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_api_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_api), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Api( - name="name_value", - display_name="display_name_value", - description="description_value", - versions=["versions_value"], - selected_version="selected_version_value", - ) - ) - response = await client.create_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateApiRequest() - +def test_create_api_rest_required_fields(request_type=apihub_service.CreateApiRequest): + transport_class = transports.ApiHubRestTransport -@pytest.mark.asyncio -async def test_create_api_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # verify fields with default values are dropped - # Ensure method has been cached - assert ( - client._client._transport.create_api - in client._client._transport._wrapped_methods - ) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_api._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.create_api - ] = mock_rpc + # verify required fields with default values are now present - request = {} - 
await client.create_api(request) + jsonified_request["parent"] = "parent_value" - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.create_api(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_api._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("api_id",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -@pytest.mark.asyncio -async def test_create_api_async( - transport: str = "grpc_asyncio", request_type=apihub_service.CreateApiRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Api() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Api( - name="name_value", - display_name="display_name_value", - description="description_value", - versions=["versions_value"], - selected_version="selected_version_value", - ) - ) - response = await client.create_api(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.CreateApiRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Api.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Api) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.versions == ["versions_value"] - assert response.selected_version == "selected_version_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_api(request) -@pytest.mark.asyncio -async def test_create_api_async_from_dict(): - await test_create_api_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_create_api_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_create_api_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.CreateApiRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_api), "__call__") as call: - call.return_value = common_fields.Api() - client.create_api(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + unset_fields = transport.create_api._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("apiId",)) + & set( + ( + "parent", + "api", + ) + ) + ) -@pytest.mark.asyncio -async def test_create_api_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_api_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_create_api" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_create_api" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.CreateApiRequest.pb( + apihub_service.CreateApiRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.CreateApiRequest() - - request.parent = "parent_value" + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Api.to_json(common_fields.Api()) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.create_api), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common_fields.Api()) - await client.create_api(request) + request = apihub_service.CreateApiRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Api() - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + client.create_api( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + pre.assert_called_once() + post.assert_called_once() -def test_create_api_flattened(): +def test_create_api_rest_bad_request( + transport: str = "rest", request_type=apihub_service.CreateApiRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Api() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_api( - parent="parent_value", - api=common_fields.Api(name="name_value"), - api_id="api_id_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].api - mock_val = common_fields.Api(name="name_value") - assert arg == mock_val - arg = args[0].api_id - mock_val = "api_id_value" - assert arg == mock_val - - -def test_create_api_flattened_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_api( - apihub_service.CreateApiRequest(), - parent="parent_value", - api=common_fields.Api(name="name_value"), - api_id="api_id_value", - ) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_api(request) -@pytest.mark.asyncio -async def test_create_api_flattened_async(): - client = ApiHubAsyncClient( +def test_create_api_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Api() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = common_fields.Api() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common_fields.Api()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_api( + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", api=common_fields.Api(name="name_value"), api_id="api_id_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Api.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_api(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].api - mock_val = common_fields.Api(name="name_value") - assert arg == mock_val - arg = args[0].api_id - mock_val = "api_id_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_create_api_flattened_error_async(): - client = ApiHubAsyncClient( + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/apis" % client.transport._host, + args[1], + ) + + +def test_create_api_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.create_api( + client.create_api( apihub_service.CreateApiRequest(), parent="parent_value", api=common_fields.Api(name="name_value"), @@ -1454,6 +1320,12 @@ async def test_create_api_flattened_error_async(): ) +def test_create_api_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -1461,33 +1333,37 @@ async def test_create_api_flattened_error_async(): dict, ], ) -def test_get_api(request_type, transport: str = "grpc"): +def test_get_api_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/apis/sample3"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Api( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Api( name="name_value", display_name="display_name_value", description="description_value", versions=["versions_value"], selected_version="selected_version_value", ) - response = client.get_api(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.GetApiRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Api.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_api(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.Api) @@ -1498,60 +1374,13 @@ def test_get_api(request_type, transport: str = "grpc"): assert response.selected_version == "selected_version_value" -def test_get_api_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_api), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetApiRequest() - - -def test_get_api_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = apihub_service.GetApiRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_api), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_api(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetApiRequest( - name="name_value", - ) - - -def test_get_api_use_cached_wrapped_rpc(): +def test_get_api_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1567,6 +1396,7 @@ def test_get_api_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.get_api] = mock_rpc + request = {} client.get_api(request) @@ -1580,259 +1410,224 @@ def test_get_api_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_api_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_api), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Api( - name="name_value", - display_name="display_name_value", - description="description_value", - versions=["versions_value"], - selected_version="selected_version_value", - ) - ) - response = await client.get_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetApiRequest() - - -@pytest.mark.asyncio -async def test_get_api_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() +def test_get_api_rest_required_fields(request_type=apihub_service.GetApiRequest): + transport_class = transports.ApiHubRestTransport - # Ensure method has been cached - assert ( - client._client._transport.get_api - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_api - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.get_api(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_api._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was 
called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.get_api(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_api._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_get_api_async( - transport: str = "grpc_asyncio", request_type=apihub_service.GetApiRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Api() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Api( - name="name_value", - display_name="display_name_value", - description="description_value", - versions=["versions_value"], - selected_version="selected_version_value", - ) - ) - response = await client.get_api(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.GetApiRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Api.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.Api) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.versions == ["versions_value"] - assert response.selected_version == "selected_version_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_api(request) -@pytest.mark.asyncio -async def test_get_api_async_from_dict(): - await test_get_api_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_get_api_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_api_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = apihub_service.GetApiRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_api), "__call__") as call: - call.return_value = common_fields.Api() - client.get_api(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.get_api._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.asyncio -async def test_get_api_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_api_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_get_api" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_get_api" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.GetApiRequest.pb(apihub_service.GetApiRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = apihub_service.GetApiRequest() - - request.name = "name_value" + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Api.to_json(common_fields.Api()) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_api), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common_fields.Api()) - await client.get_api(request) + request = apihub_service.GetApiRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Api() - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + client.get_api( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + pre.assert_called_once() + post.assert_called_once() -def test_get_api_flattened(): +def test_get_api_rest_bad_request( + transport: str = "rest", request_type=apihub_service.GetApiRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Api() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.get_api( - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/apis/sample3"} + request = request_type(**request_init) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_api(request) -def test_get_api_flattened_error(): +def test_get_api_rest_flattened(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_api( - apihub_service.GetApiRequest(), - name="name_value", - ) - - -@pytest.mark.asyncio -async def test_get_api_flattened_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Api() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_api), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = common_fields.Api() + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/apis/sample3"} - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common_fields.Api()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_api( + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Api.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_api(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/apis/*}" % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_get_api_flattened_error_async(): - client = ApiHubAsyncClient( +def test_get_api_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_api( + client.get_api( apihub_service.GetApiRequest(), name="name_value", ) +def test_get_api_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -1840,93 +1635,46 @@ async def test_get_api_flattened_error_async(): dict, ], ) -def test_list_apis(request_type, transport: str = "grpc"): +def test_list_apis_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_apis), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListApisResponse( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListApisResponse( next_page_token="next_page_token_value", ) - response = client.list_apis(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.ListApisRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListApisResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_apis(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListApisPager) assert response.next_page_token == "next_page_token_value" -def test_list_apis_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_apis), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_apis() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListApisRequest() - - -def test_list_apis_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = apihub_service.ListApisRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_apis), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_apis(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListApisRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", - ) - - -def test_list_apis_use_cached_wrapped_rpc(): +def test_list_apis_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1942,6 +1690,7 @@ def test_list_apis_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.list_apis] = mock_rpc + request = {} client.list_apis(request) @@ -1955,262 +1704,250 @@ def test_list_apis_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_apis_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", +def test_list_apis_rest_required_fields(request_type=apihub_service.ListApisRequest): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_apis), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListApisResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_apis() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListApisRequest() + # verify fields with default values are dropped + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_apis._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) -@pytest.mark.asyncio -async def test_list_apis_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # verify required fields with default values are now present - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + jsonified_request["parent"] = "parent_value" - # Ensure method has been cached - assert ( - client._client._transport.list_apis - in 
client._client._transport._wrapped_methods + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_apis._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", ) + ) + jsonified_request.update(unset_fields) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.list_apis - ] = mock_rpc - - request = {} - await client.list_apis(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_apis(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -@pytest.mark.asyncio -async def test_list_apis_async( - transport: str = "grpc_asyncio", request_type=apihub_service.ListApisRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListApisResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_apis), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListApisResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_apis(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.ListApisRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = apihub_service.ListApisResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListApisAsyncPager) - assert response.next_page_token == "next_page_token_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_apis(request) -@pytest.mark.asyncio -async def test_list_apis_async_from_dict(): - await test_list_apis_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_list_apis_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_list_apis_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.ListApisRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_apis), "__call__") as call: - call.return_value = apihub_service.ListApisResponse() - client.list_apis(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + unset_fields = transport.list_apis._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) -@pytest.mark.asyncio -async def test_list_apis_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_apis_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_list_apis" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_list_apis" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.ListApisRequest.pb(apihub_service.ListApisRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.ListApisRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_apis), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = apihub_service.ListApisResponse.to_json( apihub_service.ListApisResponse() ) - await client.list_apis(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -def test_list_apis_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.ListApisRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = apihub_service.ListApisResponse() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_apis), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListApisResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.list_apis( - parent="parent_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_list_apis_flattened_error(): +def test_list_apis_rest_bad_request( + transport: str = "rest", request_type=apihub_service.ListApisRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_apis( - apihub_service.ListApisRequest(), - parent="parent_value", - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_apis(request) -@pytest.mark.asyncio -async def test_list_apis_flattened_async(): - client = ApiHubAsyncClient( +def test_list_apis_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_apis), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListApisResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = apihub_service.ListApisResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListApisResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_apis( + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListApisResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_apis(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/apis" % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_list_apis_flattened_error_async(): - client = ApiHubAsyncClient( +def test_list_apis_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_apis( + client.list_apis( apihub_service.ListApisRequest(), parent="parent_value", ) -def test_list_apis_pager(transport_name: str = "grpc"): +def test_list_apis_rest_pager(transport: str = "rest"): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_apis), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - apihub_service.ListApisResponse( + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + apihub_service.ListApisResponse( apis=[ common_fields.Api(), common_fields.Api(), @@ -2234,162 +1971,27 @@ def test_list_apis_pager(transport_name: str = "grpc"): common_fields.Api(), ], ), - RuntimeError, ) + # Two responses for two calls + response = response + response - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_apis(request={}, retry=retry, timeout=timeout) + # Wrap the values into proper Response objs + response = tuple(apihub_service.ListApisResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert 
pager._timeout == timeout + pager = client.list_apis(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, common_fields.Api) for i in results) - -def test_list_apis_pages(transport_name: str = "grpc"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_apis), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - apihub_service.ListApisResponse( - apis=[ - common_fields.Api(), - common_fields.Api(), - common_fields.Api(), - ], - next_page_token="abc", - ), - apihub_service.ListApisResponse( - apis=[], - next_page_token="def", - ), - apihub_service.ListApisResponse( - apis=[ - common_fields.Api(), - ], - next_page_token="ghi", - ), - apihub_service.ListApisResponse( - apis=[ - common_fields.Api(), - common_fields.Api(), - ], - ), - RuntimeError, - ) - pages = list(client.list_apis(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_apis_async_pager(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_apis), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.ListApisResponse( - apis=[ - common_fields.Api(), - common_fields.Api(), - common_fields.Api(), - ], - next_page_token="abc", - ), - apihub_service.ListApisResponse( - apis=[], - next_page_token="def", - ), - apihub_service.ListApisResponse( - apis=[ - common_fields.Api(), - ], - next_page_token="ghi", - ), - apihub_service.ListApisResponse( - apis=[ - common_fields.Api(), - common_fields.Api(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_apis( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, common_fields.Api) for i in responses) - - -@pytest.mark.asyncio -async def test_list_apis_async_pages(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_apis), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.ListApisResponse( - apis=[ - common_fields.Api(), - common_fields.Api(), - common_fields.Api(), - ], - next_page_token="abc", - ), - apihub_service.ListApisResponse( - apis=[], - next_page_token="def", - ), - apihub_service.ListApisResponse( - apis=[ - common_fields.Api(), - ], - next_page_token="ghi", - ), - apihub_service.ListApisResponse( - apis=[ - common_fields.Api(), - common_fields.Api(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_apis(request={}) - ).pages: - pages.append(page_) + pages = list(client.list_apis(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -2401,33 +2003,135 @@ async def test_list_apis_async_pages(): dict, ], ) -def test_update_api(request_type, transport: str = "grpc"): +def test_update_api_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"api": {"name": "projects/sample1/locations/sample2/apis/sample3"}} + request_init["api"] = { + "name": "projects/sample1/locations/sample2/apis/sample3", + "display_name": "display_name_value", + "description": "description_value", + "documentation": {"external_uri": "external_uri_value"}, + "owner": {"display_name": "display_name_value", "email": "email_value"}, + "versions": ["versions_value1", "versions_value2"], + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "target_user": { + "enum_values": { + "values": [ + { + "id": "id_value", + "display_name": "display_name_value", + "description": "description_value", + "immutable": True, + } + ] + }, + "string_values": {"values": ["values_value1", "values_value2"]}, + "json_values": {}, + "attribute": "attribute_value", + }, + "team": {}, + "business_unit": {}, + "maturity_level": {}, + "attributes": {}, + "api_style": {}, + "selected_version": "selected_version_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.UpdateApiRequest.meta.fields["api"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["api"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_api), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = common_fields.Api( + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["api"][field])): + del request_init["api"][field][i][subfield] + else: + del request_init["api"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Api( name="name_value", display_name="display_name_value", description="description_value", versions=["versions_value"], selected_version="selected_version_value", ) - response = client.update_api(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.UpdateApiRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Api.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_api(request) # Establish that the response is the type that we expect. 
assert isinstance(response, common_fields.Api) @@ -2438,61 +2142,18 @@ def test_update_api(request_type, transport: str = "grpc"): assert response.selected_version == "selected_version_value" -def test_update_api_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_api), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. +def test_update_api_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - client.update_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateApiRequest() - -def test_update_api_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.UpdateApiRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_api), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_api(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateApiRequest() - - -def test_update_api_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() # Ensure method has been cached assert client._transport.update_api in client._transport._wrapped_methods @@ -2503,6 +2164,7 @@ def test_update_api_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.update_api] = mock_rpc + request = {} client.update_api(request) @@ -2516,210 +2178,218 @@ def test_update_api_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_api_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_api), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Api( - name="name_value", - display_name="display_name_value", - description="description_value", - versions=["versions_value"], - selected_version="selected_version_value", - ) - ) - response = await client.update_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateApiRequest() - +def test_update_api_rest_required_fields(request_type=apihub_service.UpdateApiRequest): + transport_class = transports.ApiHubRestTransport -@pytest.mark.asyncio -async def test_update_api_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # verify fields with default values are dropped - # Ensure method has been cached - assert ( - client._client._transport.update_api - in client._client._transport._wrapped_methods - ) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_api._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.update_api - ] = mock_rpc + # verify required fields with default values are now present - request = {} - await client.update_api(request) + 
unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_api._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with non-default values are left alone - await client.update_api(request) + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + # Designate an appropriate value for the returned response. + return_value = common_fields.Api() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + response_value = Response() + response_value.status_code = 200 -@pytest.mark.asyncio -async def test_update_api_async( - transport: str = "grpc_asyncio", request_type=apihub_service.UpdateApiRequest -): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # Convert return value to protobuf type + return_value = common_fields.Api.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Api( - name="name_value", - display_name="display_name_value", - description="description_value", - versions=["versions_value"], - selected_version="selected_version_value", - ) - ) - response = await client.update_api(request) + response = client.update_api(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.UpdateApiRequest() - assert args[0] == request + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Api) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.versions == ["versions_value"] - assert response.selected_version == "selected_version_value" +def test_update_api_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) -@pytest.mark.asyncio -async def test_update_api_async_from_dict(): - await test_update_api_async(request_type=dict) + unset_fields = transport.update_api._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "api", + "updateMask", + ) + ) + ) -def test_update_api_field_headers(): - client = ApiHubClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_api_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_update_api" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_update_api" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.UpdateApiRequest.pb( + apihub_service.UpdateApiRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = apihub_service.UpdateApiRequest() - - request.api.name = "name_value" + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Api.to_json(common_fields.Api()) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_api), "__call__") as call: - call.return_value = common_fields.Api() - client.update_api(request) + request = apihub_service.UpdateApiRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Api() - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + client.update_api( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "api.name=name_value", - ) in kw["metadata"] + pre.assert_called_once() + post.assert_called_once() -@pytest.mark.asyncio -async def test_update_api_field_headers_async(): - client = ApiHubAsyncClient( +def test_update_api_rest_bad_request( + transport: str = "rest", request_type=apihub_service.UpdateApiRequest +): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.UpdateApiRequest() - - request.api.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_api), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common_fields.Api()) - await client.update_api(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + # send a request that will satisfy transcoding + request_init = {"api": {"name": "projects/sample1/locations/sample2/apis/sample3"}} + request = request_type(**request_init) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "api.name=name_value", - ) in kw["metadata"] + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_api(request) -def test_update_api_flattened(): +def test_update_api_rest_flattened(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Api() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_api( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = common_fields.Api() + + # get arguments that satisfy an http rule for this method + sample_request = { + "api": {"name": "projects/sample1/locations/sample2/apis/sample3"} + } + + # get truthy value for each flattened field + mock_args = dict( api=common_fields.Api(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Api.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_api(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].api - mock_val = common_fields.Api(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{api.name=projects/*/locations/*/apis/*}" % client.transport._host, + args[1], + ) -def test_update_api_flattened_error(): +def test_update_api_rest_flattened_error(transport: str = "rest"): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -2732,140 +2402,54 @@ def test_update_api_flattened_error(): ) -@pytest.mark.asyncio -async def test_update_api_flattened_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_update_api_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock 
the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Api() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common_fields.Api()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_api( - api=common_fields.Api(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].api - mock_val = common_fields.Api(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_update_api_flattened_error_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.DeleteApiRequest, + dict, + ], +) +def test_delete_api_rest(request_type): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.update_api( - apihub_service.UpdateApiRequest(), - api=common_fields.Api(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/apis/sample3"} + request = request_type(**request_init) -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.DeleteApiRequest, - dict, - ], -) -def test_delete_api(request_type, transport: str = "grpc"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value response = client.delete_api(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.DeleteApiRequest() - assert args[0] == request - # Establish that the response is the type that we expect. assert response is None -def test_delete_api_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_api), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteApiRequest() - - -def test_delete_api_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.DeleteApiRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_api), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.delete_api(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteApiRequest( - name="name_value", - ) - - -def test_delete_api_use_cached_wrapped_rpc(): +def test_delete_api_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -2881,6 +2465,7 @@ def test_delete_api_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.delete_api] = mock_rpc + request = {} client.delete_api(request) @@ -2894,185 +2479,200 @@ def test_delete_api_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_api_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_api), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteApiRequest() - - -@pytest.mark.asyncio -async def test_delete_api_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() +def test_delete_api_rest_required_fields(request_type=apihub_service.DeleteApiRequest): + transport_class = transports.ApiHubRestTransport - # Ensure method has been cached - assert ( - client._client._transport.delete_api - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.delete_api - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.delete_api(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_api._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.delete_api(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_api._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("force",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_delete_api_async( - transport: str = "grpc_asyncio", request_type=apihub_service.DeleteApiRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_api(request) + response_value = Response() + response_value.status_code = 200 + json_return_value = "" - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.DeleteApiRequest() - assert args[0] == request + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Establish that the response is the type that we expect. - assert response is None + response = client.delete_api(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -@pytest.mark.asyncio -async def test_delete_api_async_from_dict(): - await test_delete_api_async(request_type=dict) +def test_delete_api_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + unset_fields = transport.delete_api._get_unset_required_fields({}) + assert set(unset_fields) == (set(("force",)) & set(("name",))) -def test_delete_api_field_headers(): - client = ApiHubClient( + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_api_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with 
mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_delete_api" + ) as pre: + pre.assert_not_called() + pb_message = apihub_service.DeleteApiRequest.pb( + apihub_service.DeleteApiRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.DeleteApiRequest() - - request.name = "name_value" + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_api), "__call__") as call: - call.return_value = None - client.delete_api(request) + request = apihub_service.DeleteApiRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + client.delete_api( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + pre.assert_called_once() -@pytest.mark.asyncio -async def test_delete_api_field_headers_async(): - client = ApiHubAsyncClient( +def test_delete_api_rest_bad_request( + transport: str = "rest", request_type=apihub_service.DeleteApiRequest +): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. 
Set these to a non-empty value. - request = apihub_service.DeleteApiRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_api), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_api(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/apis/sample3"} + request = request_type(**request_init) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_api(request) -def test_delete_api_flattened(): +def test_delete_api_rest_flattened(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_api( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/apis/sample3"} + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_api(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/apis/*}" % client.transport._host, + args[1], + ) -def test_delete_api_flattened_error(): +def test_delete_api_rest_flattened_error(transport: str = "rest"): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -3084,47 +2684,11 @@ def test_delete_api_flattened_error(): ) -@pytest.mark.asyncio -async def test_delete_api_flattened_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_delete_api_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_api), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_api( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_delete_api_flattened_error_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_api( - apihub_service.DeleteApiRequest(), - name="name_value", - ) - @pytest.mark.parametrize( "request_type", @@ -3133,20 +2697,118 @@ async def test_delete_api_flattened_error_async(): dict, ], ) -def test_create_version(request_type, transport: str = "grpc"): +def test_create_version_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/apis/sample3"} + request_init["version"] = { + "name": "name_value", + "display_name": "display_name_value", + "description": "description_value", + "documentation": {"external_uri": "external_uri_value"}, + "specs": ["specs_value1", "specs_value2"], + "api_operations": ["api_operations_value1", "api_operations_value2"], + "definitions": ["definitions_value1", "definitions_value2"], + "deployments": ["deployments_value1", "deployments_value2"], + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "lifecycle": { + "enum_values": { + "values": [ + { + "id": "id_value", + "display_name": "display_name_value", + "description": "description_value", + "immutable": True, + } + ] + }, + "string_values": {"values": ["values_value1", "values_value2"]}, + "json_values": {}, + "attribute": "attribute_value", + }, + "compliance": {}, + "accreditation": {}, + "attributes": {}, + "selected_deployment": "selected_deployment_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.CreateVersionRequest.meta.fields["version"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["version"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_version), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = common_fields.Version( + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["version"][field])): + del request_init["version"][field][i][subfield] + else: + del request_init["version"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Version( name="name_value", display_name="display_name_value", description="description_value", @@ -3156,13 +2818,17 @@ def test_create_version(request_type, transport: str = "grpc"): deployments=["deployments_value"], selected_deployment="selected_deployment_value", ) - response = client.create_version(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.CreateVersionRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Version.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_version(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.Version) @@ -3176,62 +2842,13 @@ def test_create_version(request_type, transport: str = "grpc"): assert response.selected_deployment == "selected_deployment_value" -def test_create_version_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_version), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_version() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateVersionRequest() - - -def test_create_version_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.CreateVersionRequest( - parent="parent_value", - version_id="version_id_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_version), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_version(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateVersionRequest( - parent="parent_value", - version_id="version_id_value", - ) - - -def test_create_version_use_cached_wrapped_rpc(): +def test_create_version_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -3247,6 +2864,7 @@ def test_create_version_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.create_version] = mock_rpc + request = {} client.create_version(request) @@ -3260,287 +2878,233 @@ def test_create_version_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_version_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Version( - name="name_value", - display_name="display_name_value", - description="description_value", - specs=["specs_value"], - api_operations=["api_operations_value"], - definitions=["definitions_value"], - deployments=["deployments_value"], - selected_deployment="selected_deployment_value", - ) - ) - response = await client.create_version() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateVersionRequest() - - -@pytest.mark.asyncio -async def test_create_version_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_create_version_rest_required_fields( + request_type=apihub_service.CreateVersionRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ApiHubRestTransport - # Ensure method has been cached - assert ( - client._client._transport.create_version - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - 
mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.create_version - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.create_version(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.create_version(request) + jsonified_request["parent"] = "parent_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_version._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("version_id",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -@pytest.mark.asyncio -async def test_create_version_async( - transport: str = "grpc_asyncio", request_type=apihub_service.CreateVersionRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. 
+ return_value = common_fields.Version() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Version( - name="name_value", - display_name="display_name_value", - description="description_value", - specs=["specs_value"], - api_operations=["api_operations_value"], - definitions=["definitions_value"], - deployments=["deployments_value"], - selected_deployment="selected_deployment_value", - ) - ) - response = await client.create_version(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.CreateVersionRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Version.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Version) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.specs == ["specs_value"] - assert response.api_operations == ["api_operations_value"] - assert response.definitions == ["definitions_value"] - assert response.deployments == ["deployments_value"] - assert response.selected_deployment == "selected_deployment_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_version(request) -@pytest.mark.asyncio -async def test_create_version_async_from_dict(): - await test_create_version_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_create_version_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_create_version_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.CreateVersionRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_version), "__call__") as call: - call.return_value = common_fields.Version() - client.create_version(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + unset_fields = transport.create_version._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("versionId",)) + & set( + ( + "parent", + "version", + ) + ) + ) -@pytest.mark.asyncio -async def test_create_version_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_version_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_create_version" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_create_version" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.CreateVersionRequest.pb( + apihub_service.CreateVersionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.CreateVersionRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.create_version), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Version.to_json( common_fields.Version() ) - await client.create_version(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - -def test_create_version_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.CreateVersionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Version() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Version() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.create_version( - parent="parent_value", - version=common_fields.Version(name="name_value"), - version_id="version_id_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].version - mock_val = common_fields.Version(name="name_value") - assert arg == mock_val - arg = args[0].version_id - mock_val = "version_id_value" - assert arg == mock_val - - -def test_create_version_flattened_error(): + pre.assert_called_once() + post.assert_called_once() + + +def test_create_version_rest_bad_request( + transport: str = "rest", request_type=apihub_service.CreateVersionRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_version( - apihub_service.CreateVersionRequest(), - parent="parent_value", - version=common_fields.Version(name="name_value"), - version_id="version_id_value", - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/apis/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_version(request) -@pytest.mark.asyncio -async def test_create_version_flattened_async(): - client = ApiHubAsyncClient( +def test_create_version_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_version), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = common_fields.Version() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Version() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2/apis/sample3"} - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Version() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_version( + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", version=common_fields.Version(name="name_value"), version_id="version_id_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Version.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_version(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].version - mock_val = common_fields.Version(name="name_value") - assert arg == mock_val - arg = args[0].version_id - mock_val = "version_id_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_create_version_flattened_error_async(): - client = ApiHubAsyncClient( + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/apis/*}/versions" + % client.transport._host, + args[1], + ) + + +def test_create_version_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_version( + client.create_version( apihub_service.CreateVersionRequest(), parent="parent_value", version=common_fields.Version(name="name_value"), @@ -3548,6 +3112,12 @@ async def test_create_version_flattened_error_async(): ) +def test_create_version_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -3555,20 +3125,22 @@ async def test_create_version_flattened_error_async(): dict, ], ) -def test_get_version(request_type, transport: str = "grpc"): +def test_get_version_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Version( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Version( name="name_value", display_name="display_name_value", description="description_value", @@ -3578,13 +3150,17 @@ def test_get_version(request_type, transport: str = "grpc"): deployments=["deployments_value"], selected_deployment="selected_deployment_value", ) - response = client.get_version(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.GetVersionRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Version.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_version(request) # Establish that the response is the type that we expect. 
assert isinstance(response, common_fields.Version) @@ -3598,60 +3174,13 @@ def test_get_version(request_type, transport: str = "grpc"): assert response.selected_deployment == "selected_deployment_value" -def test_get_version_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_version), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_version() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetVersionRequest() - - -def test_get_version_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.GetVersionRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_version), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.get_version(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetVersionRequest( - name="name_value", - ) - - -def test_get_version_use_cached_wrapped_rpc(): +def test_get_version_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -3667,6 +3196,7 @@ def test_get_version_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.get_version] = mock_rpc + request = {} client.get_version(request) @@ -3680,274 +3210,235 @@ def test_get_version_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_version_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_version), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Version( - name="name_value", - display_name="display_name_value", - description="description_value", - specs=["specs_value"], - api_operations=["api_operations_value"], - definitions=["definitions_value"], - deployments=["deployments_value"], - selected_deployment="selected_deployment_value", - ) - ) - response = await client.get_version() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetVersionRequest() - - -@pytest.mark.asyncio -async def test_get_version_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_get_version_rest_required_fields( + request_type=apihub_service.GetVersionRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.get_version - in client._client._transport._wrapped_methods - ) + transport_class = transports.ApiHubRestTransport - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_version - ] = mock_rpc + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - request = {} - await client.get_version(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - await client.get_version(request) + # verify required fields with default values are now present - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + jsonified_request["name"] = "name_value" + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) -@pytest.mark.asyncio -async def test_get_version_async( - transport: str = "grpc_asyncio", request_type=apihub_service.GetVersionRequest -): - client = ApiHubAsyncClient( + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Version() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Version( - name="name_value", - display_name="display_name_value", - description="description_value", - specs=["specs_value"], - api_operations=["api_operations_value"], - definitions=["definitions_value"], - deployments=["deployments_value"], - selected_deployment="selected_deployment_value", - ) - ) - response = await client.get_version(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.GetVersionRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Version.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Version) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.specs == ["specs_value"] - assert response.api_operations == ["api_operations_value"] - assert response.definitions == ["definitions_value"] - assert response.deployments == ["deployments_value"] - assert response.selected_deployment == "selected_deployment_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_version(request) -@pytest.mark.asyncio -async def test_get_version_async_from_dict(): - await test_get_version_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_get_version_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_version_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetVersionRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_version), "__call__") as call: - call.return_value = common_fields.Version() - client.get_version(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.get_version._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.asyncio -async def test_get_version_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_version_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_get_version" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_get_version" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.GetVersionRequest.pb( + apihub_service.GetVersionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetVersionRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_version), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Version.to_json( common_fields.Version() ) - await client.get_version(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - -def test_get_version_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.GetVersionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Version() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Version() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.get_version( - name="name_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_get_version_flattened_error(): +def test_get_version_rest_bad_request( + transport: str = "rest", request_type=apihub_service.GetVersionRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_version( - apihub_service.GetVersionRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_version(request) -@pytest.mark.asyncio -async def test_get_version_flattened_async(): - client = ApiHubAsyncClient( +def test_get_version_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Version() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Version() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Version() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.get_version( + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Version.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_version(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/apis/*/versions/*}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_get_version_flattened_error_async(): - client = ApiHubAsyncClient( +def test_get_version_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_version( + client.get_version( apihub_service.GetVersionRequest(), name="name_value", ) +def test_get_version_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -3955,93 +3446,46 @@ async def test_get_version_flattened_error_async(): dict, ], ) -def test_list_versions(request_type, transport: str = "grpc"): +def test_list_versions_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/apis/sample3"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_versions), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListVersionsResponse( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListVersionsResponse( next_page_token="next_page_token_value", ) - response = client.list_versions(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.ListVersionsRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListVersionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_versions(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListVersionsPager) assert response.next_page_token == "next_page_token_value" -def test_list_versions_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_versions), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_versions() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListVersionsRequest() - - -def test_list_versions_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = apihub_service.ListVersionsRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_versions), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_versions(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListVersionsRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", - ) - - -def test_list_versions_use_cached_wrapped_rpc(): +def test_list_versions_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -4057,6 +3501,7 @@ def test_list_versions_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.list_versions] = mock_rpc + request = {} client.list_versions(request) @@ -4070,263 +3515,254 @@ def test_list_versions_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_versions_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", +def test_list_versions_rest_required_fields( + request_type=apihub_service.ListVersionsRequest, +): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_versions), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListVersionsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_versions() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListVersionsRequest() + # verify fields with default values are dropped + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_versions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) -@pytest.mark.asyncio -async def test_list_versions_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # verify required fields with default values are now present - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + jsonified_request["parent"] = "parent_value" - # Ensure method has been cached - assert ( - 
client._client._transport.list_versions - in client._client._transport._wrapped_methods + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_versions._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", ) + ) + jsonified_request.update(unset_fields) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.list_versions - ] = mock_rpc - - request = {} - await client.list_versions(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_versions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -@pytest.mark.asyncio -async def test_list_versions_async( - transport: str = "grpc_asyncio", request_type=apihub_service.ListVersionsRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListVersionsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_versions), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListVersionsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_versions(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.ListVersionsRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = apihub_service.ListVersionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListVersionsAsyncPager) - assert response.next_page_token == "next_page_token_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_versions(request) -@pytest.mark.asyncio -async def test_list_versions_async_from_dict(): - await test_list_versions_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_list_versions_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_list_versions_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.ListVersionsRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_versions), "__call__") as call: - call.return_value = apihub_service.ListVersionsResponse() - client.list_versions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + unset_fields = transport.list_versions._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) -@pytest.mark.asyncio -async def test_list_versions_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_versions_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_list_versions" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_list_versions" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.ListVersionsRequest.pb( + apihub_service.ListVersionsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.ListVersionsRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_versions), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = apihub_service.ListVersionsResponse.to_json( apihub_service.ListVersionsResponse() ) - await client.list_versions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + request = apihub_service.ListVersionsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = apihub_service.ListVersionsResponse() -def test_list_versions_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_versions), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListVersionsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.list_versions( - parent="parent_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_list_versions_flattened_error(): +def test_list_versions_rest_bad_request( + transport: str = "rest", request_type=apihub_service.ListVersionsRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_versions( - apihub_service.ListVersionsRequest(), - parent="parent_value", - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/apis/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_versions(request) -@pytest.mark.asyncio -async def test_list_versions_flattened_async(): - client = ApiHubAsyncClient( +def test_list_versions_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_versions), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListVersionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = apihub_service.ListVersionsResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListVersionsResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_versions( + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2/apis/sample3"} + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListVersionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_versions(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/apis/*}/versions" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_list_versions_flattened_error_async(): - client = ApiHubAsyncClient( +def test_list_versions_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_versions( + client.list_versions( apihub_service.ListVersionsRequest(), parent="parent_value", ) -def test_list_versions_pager(transport_name: str = "grpc"): +def test_list_versions_rest_pager(transport: str = "rest"): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_versions), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( apihub_service.ListVersionsResponse( versions=[ common_fields.Version(), @@ -4351,164 +3787,31 @@ def test_list_versions_pager(transport_name: str = "grpc"): common_fields.Version(), ], ), - RuntimeError, ) + # Two responses for two calls + response = response + response - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Wrap the values into proper Response objs + response = tuple( + apihub_service.ListVersionsResponse.to_json(x) for x in response ) - pager = client.list_versions(request={}, retry=retry, timeout=timeout) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2/apis/sample3"} - assert pager._metadata == expected_metadata - assert pager._retry == retry - 
assert pager._timeout == timeout + pager = client.list_versions(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, common_fields.Version) for i in results) - -def test_list_versions_pages(transport_name: str = "grpc"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_versions), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - apihub_service.ListVersionsResponse( - versions=[ - common_fields.Version(), - common_fields.Version(), - common_fields.Version(), - ], - next_page_token="abc", - ), - apihub_service.ListVersionsResponse( - versions=[], - next_page_token="def", - ), - apihub_service.ListVersionsResponse( - versions=[ - common_fields.Version(), - ], - next_page_token="ghi", - ), - apihub_service.ListVersionsResponse( - versions=[ - common_fields.Version(), - common_fields.Version(), - ], - ), - RuntimeError, - ) - pages = list(client.list_versions(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_versions_async_pager(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_versions), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.ListVersionsResponse( - versions=[ - common_fields.Version(), - common_fields.Version(), - common_fields.Version(), - ], - next_page_token="abc", - ), - apihub_service.ListVersionsResponse( - versions=[], - next_page_token="def", - ), - apihub_service.ListVersionsResponse( - versions=[ - common_fields.Version(), - ], - next_page_token="ghi", - ), - apihub_service.ListVersionsResponse( - versions=[ - common_fields.Version(), - common_fields.Version(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_versions( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, common_fields.Version) for i in responses) - - -@pytest.mark.asyncio -async def test_list_versions_async_pages(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_versions), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.ListVersionsResponse( - versions=[ - common_fields.Version(), - common_fields.Version(), - common_fields.Version(), - ], - next_page_token="abc", - ), - apihub_service.ListVersionsResponse( - versions=[], - next_page_token="def", - ), - apihub_service.ListVersionsResponse( - versions=[ - common_fields.Version(), - ], - next_page_token="ghi", - ), - apihub_service.ListVersionsResponse( - versions=[ - common_fields.Version(), - common_fields.Version(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_versions(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + pages = list(client.list_versions(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( @@ -4518,20 +3821,122 @@ async def test_list_versions_async_pages(): dict, ], ) -def test_update_version(request_type, transport: str = "grpc"): +def test_update_version_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = { + "version": { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + } + request_init["version"] = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4", + "display_name": "display_name_value", + "description": "description_value", + "documentation": {"external_uri": "external_uri_value"}, + "specs": ["specs_value1", "specs_value2"], + "api_operations": ["api_operations_value1", "api_operations_value2"], + "definitions": ["definitions_value1", "definitions_value2"], + "deployments": ["deployments_value1", "deployments_value2"], + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "lifecycle": { + "enum_values": { + "values": [ + { + "id": "id_value", + "display_name": "display_name_value", + "description": "description_value", + "immutable": True, + } + ] + }, + "string_values": {"values": ["values_value1", "values_value2"]}, + "json_values": {}, + "attribute": "attribute_value", + }, + "compliance": {}, + "accreditation": {}, + "attributes": {}, + "selected_deployment": "selected_deployment_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.UpdateVersionRequest.meta.fields["version"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["version"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["version"][field])): + del request_init["version"][field][i][subfield] + else: + del 
request_init["version"][field][subfield] + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Version( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Version( name="name_value", display_name="display_name_value", description="description_value", @@ -4541,13 +3946,17 @@ def test_update_version(request_type, transport: str = "grpc"): deployments=["deployments_value"], selected_deployment="selected_deployment_value", ) - response = client.update_version(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.UpdateVersionRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Version.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_version(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.Version) @@ -4561,56 +3970,13 @@ def test_update_version(request_type, transport: str = "grpc"): assert response.selected_deployment == "selected_deployment_value" -def test_update_version_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_version), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_version() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateVersionRequest() - - -def test_update_version_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.UpdateVersionRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_version), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.update_version(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateVersionRequest() - - -def test_update_version_use_cached_wrapped_rpc(): +def test_update_version_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -4626,6 +3992,7 @@ def test_update_version_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.update_version] = mock_rpc + request = {} client.update_version(request) @@ -4639,284 +4006,247 @@ def test_update_version_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_version_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_version), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Version( - name="name_value", - display_name="display_name_value", - description="description_value", - specs=["specs_value"], - api_operations=["api_operations_value"], - definitions=["definitions_value"], - deployments=["deployments_value"], - selected_deployment="selected_deployment_value", - ) - ) - response = await client.update_version() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateVersionRequest() - - -@pytest.mark.asyncio -async def test_update_version_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_update_version_rest_required_fields( + request_type=apihub_service.UpdateVersionRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + transport_class = transports.ApiHubRestTransport - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.update_version - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.update_version - ] = mock_rpc + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - request = {} - await client.update_version(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - await client.update_version(request) + # verify required fields with default values are now present - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_version._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone -@pytest.mark.asyncio -async def test_update_version_async( - transport: str = "grpc_asyncio", request_type=apihub_service.UpdateVersionRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Version() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Version( - name="name_value", - display_name="display_name_value", - description="description_value", - specs=["specs_value"], - api_operations=["api_operations_value"], - definitions=["definitions_value"], - deployments=["deployments_value"], - selected_deployment="selected_deployment_value", - ) - ) - response = await client.update_version(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.UpdateVersionRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Version.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Version) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.specs == ["specs_value"] - assert response.api_operations == ["api_operations_value"] - assert response.definitions == ["definitions_value"] - assert response.deployments == ["deployments_value"] - assert response.selected_deployment == "selected_deployment_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_version(request) -@pytest.mark.asyncio -async def test_update_version_async_from_dict(): - await test_update_version_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_update_version_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_update_version_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.UpdateVersionRequest() - - request.version.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_version), "__call__") as call: - call.return_value = common_fields.Version() - client.update_version(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "version.name=name_value", - ) in kw["metadata"] + unset_fields = transport.update_version._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "version", + "updateMask", + ) + ) + ) -@pytest.mark.asyncio -async def test_update_version_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_version_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_update_version" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_update_version" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.UpdateVersionRequest.pb( + apihub_service.UpdateVersionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.UpdateVersionRequest() - - request.version.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_version), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Version.to_json( common_fields.Version() ) - await client.update_version(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "version.name=name_value", - ) in kw["metadata"] - - -def test_update_version_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.UpdateVersionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Version() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Version() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.update_version( - version=common_fields.Version(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].version - mock_val = common_fields.Version(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_update_version_flattened_error(): +def test_update_version_rest_bad_request( + transport: str = "rest", request_type=apihub_service.UpdateVersionRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_version( - apihub_service.UpdateVersionRequest(), - version=common_fields.Version(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) + # send a request that will satisfy transcoding + request_init = { + "version": { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_version(request) -@pytest.mark.asyncio -async def test_update_version_flattened_async(): - client = ApiHubAsyncClient( +def test_update_version_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_version), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = common_fields.Version() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Version() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Version() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_version( + # get arguments that satisfy an http rule for this method + sample_request = { + "version": { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + } + + # get truthy value for each flattened field + mock_args = dict( version=common_fields.Version(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Version.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_version(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].version - mock_val = common_fields.Version(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_update_version_flattened_error_async(): - client = ApiHubAsyncClient( + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{version.name=projects/*/locations/*/apis/*/versions/*}" + % client.transport._host, + args[1], + ) + + +def test_update_version_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_version( + client.update_version( apihub_service.UpdateVersionRequest(), version=common_fields.Version(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) +def test_update_version_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -4924,86 +4254,43 @@ async def test_update_version_flattened_error_async(): dict, ], ) -def test_delete_version(request_type, transport: str = "grpc"): +def test_delete_version_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_version(request) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + request = request_type(**request_init) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.DeleteVersionRequest() - assert args[0] == request + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_version_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_version), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_version() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteVersionRequest() - - -def test_delete_version_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.DeleteVersionRequest( - name="name_value", - ) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_version(request) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_version), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_version(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteVersionRequest( - name="name_value", - ) + # Establish that the response is the type that we expect. + assert response is None -def test_delete_version_use_cached_wrapped_rpc(): +def test_delete_version_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -5019,6 +4306,7 @@ def test_delete_version_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[client._transport.delete_version] = mock_rpc + request = {} client.delete_version(request) @@ -5032,240 +4320,224 @@ def test_delete_version_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_version_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_version() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteVersionRequest() - - -@pytest.mark.asyncio -async def test_delete_version_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_delete_version_rest_required_fields( + request_type=apihub_service.DeleteVersionRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ApiHubRestTransport - # Ensure method has been cached - assert ( - client._client._transport.delete_version - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = 
json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.delete_version - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.delete_version(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.delete_version(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_version._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("force",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_delete_version_async( - transport: str = "grpc_asyncio", request_type=apihub_service.DeleteVersionRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_version(request) + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.DeleteVersionRequest() - assert args[0] == request + response_value = Response() + response_value.status_code = 200 + json_return_value = "" - # Establish that the response is the type that we expect. 
- assert response is None + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_version(request) -@pytest.mark.asyncio -async def test_delete_version_async_from_dict(): - await test_delete_version_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_delete_version_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_delete_version_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.DeleteVersionRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_version), "__call__") as call: - call.return_value = None - client.delete_version(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.delete_version._get_unset_required_fields({}) + assert set(unset_fields) == (set(("force",)) & set(("name",))) -@pytest.mark.asyncio -async def test_delete_version_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_version_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_delete_version" + ) as pre: + pre.assert_not_called() + pb_message = apihub_service.DeleteVersionRequest.pb( + apihub_service.DeleteVersionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.DeleteVersionRequest() - - request.name = "name_value" + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_version), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_version(request) + request = apihub_service.DeleteVersionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + client.delete_version( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + pre.assert_called_once() -def test_delete_version_flattened(): +def test_delete_version_rest_bad_request( + transport: str = "rest", request_type=apihub_service.DeleteVersionRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_version( - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + request = request_type(**request_init) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_version(request) -def test_delete_version_flattened_error(): +def test_delete_version_rest_flattened(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_version( - apihub_service.DeleteVersionRequest(), - name="name_value", - ) - - -@pytest.mark.asyncio -async def test_delete_version_flattened_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_version), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.delete_version( + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_version(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/apis/*/versions/*}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_delete_version_flattened_error_async(): - client = ApiHubAsyncClient( +def test_delete_version_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.delete_version( + client.delete_version( apihub_service.DeleteVersionRequest(), name="name_value", ) +def test_delete_version_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -5273,32 +4545,155 @@ async def test_delete_version_flattened_error_async(): dict, ], ) -def test_create_spec(request_type, transport: str = "grpc"): +def test_create_spec_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_spec), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = common_fields.Spec( - name="name_value", + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + request_init["spec"] = { + "name": "name_value", + "display_name": "display_name_value", + "spec_type": { + "enum_values": { + "values": [ + { + "id": "id_value", + "display_name": "display_name_value", + "description": "description_value", + "immutable": True, + } + ] + }, + "string_values": {"values": ["values_value1", "values_value2"]}, + "json_values": {}, + "attribute": "attribute_value", + }, + "contents": {"contents": b"contents_blob", "mime_type": "mime_type_value"}, + "details": { + "open_api_spec_details": { + "format_": 1, + "version": "version_value", + "owner": {"display_name": "display_name_value", "email": "email_value"}, + }, + "description": "description_value", + }, + "source_uri": "source_uri_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "lint_response": { + "issues": [ + { + "code": "code_value", + "path": ["path_value1", "path_value2"], + "message": "message_value", + "severity": 1, + "range_": {"start": {"line": 424, "character": 941}, "end": {}}, + } + ], + "summary": [{"severity": 1, "count": 553}], + "state": 1, + "source": "source_value", + "linter": 1, + "create_time": {}, + }, + "attributes": {}, + "documentation": {"external_uri": "external_uri_value"}, + "parsing_mode": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.CreateSpecRequest.meta.fields["spec"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["spec"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# 
pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["spec"][field])): + del request_init["spec"][field][i][subfield] + else: + del request_init["spec"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Spec( + name="name_value", display_name="display_name_value", source_uri="source_uri_value", parsing_mode=common_fields.Spec.ParsingMode.RELAXED, ) - response = client.create_spec(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.CreateSpecRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Spec.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_spec(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.Spec) @@ -5308,62 +4703,13 @@ def test_create_spec(request_type, transport: str = "grpc"): assert response.parsing_mode == common_fields.Spec.ParsingMode.RELAXED -def test_create_spec_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_spec), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_spec() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateSpecRequest() - - -def test_create_spec_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.CreateSpecRequest( - parent="parent_value", - spec_id="spec_id_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_spec), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.create_spec(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateSpecRequest( - parent="parent_value", - spec_id="spec_id_value", - ) - - -def test_create_spec_use_cached_wrapped_rpc(): +def test_create_spec_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -5379,6 +4725,7 @@ def test_create_spec_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.create_spec] = mock_rpc + request = {} client.create_spec(request) @@ -5392,271 +4739,235 @@ def test_create_spec_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_spec_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_spec), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Spec( - name="name_value", - display_name="display_name_value", - source_uri="source_uri_value", - parsing_mode=common_fields.Spec.ParsingMode.RELAXED, - ) - ) - response = await client.create_spec() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateSpecRequest() - - -@pytest.mark.asyncio -async def test_create_spec_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_create_spec_rest_required_fields( + request_type=apihub_service.CreateSpecRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ApiHubRestTransport - # Ensure method has been cached - assert ( - client._client._transport.create_spec - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.create_spec - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.create_spec(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_spec._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying 
gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.create_spec(request) + jsonified_request["parent"] = "parent_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_spec._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("spec_id",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -@pytest.mark.asyncio -async def test_create_spec_async( - transport: str = "grpc_asyncio", request_type=apihub_service.CreateSpecRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Spec( - name="name_value", - display_name="display_name_value", - source_uri="source_uri_value", - parsing_mode=common_fields.Spec.ParsingMode.RELAXED, - ) - ) - response = await client.create_spec(request) + # Designate an appropriate value for the returned response. + return_value = common_fields.Spec() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.CreateSpecRequest() - assert args[0] == request + response_value = Response() + response_value.status_code = 200 - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.Spec) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.source_uri == "source_uri_value" - assert response.parsing_mode == common_fields.Spec.ParsingMode.RELAXED + # Convert return value to protobuf type + return_value = common_fields.Spec.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -@pytest.mark.asyncio -async def test_create_spec_async_from_dict(): - await test_create_spec_async(request_type=dict) - - -def test_create_spec_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = apihub_service.CreateSpecRequest() + response = client.create_spec(request) - request.parent = "parent_value" + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_spec), "__call__") as call: - call.return_value = common_fields.Spec() - client.create_spec(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_create_spec_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + unset_fields = transport.create_spec._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("specId",)) + & set( + ( + "parent", + "spec", + ) + ) + ) -@pytest.mark.asyncio -async def test_create_spec_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_spec_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_create_spec" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_create_spec" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
apihub_service.CreateSpecRequest.pb( + apihub_service.CreateSpecRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.CreateSpecRequest() - - request.parent = "parent_value" + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Spec.to_json(common_fields.Spec()) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_spec), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common_fields.Spec()) - await client.create_spec(request) + request = apihub_service.CreateSpecRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Spec() - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + client.create_spec( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + pre.assert_called_once() + post.assert_called_once() -def test_create_spec_flattened(): +def test_create_spec_rest_bad_request( + transport: str = "rest", request_type=apihub_service.CreateSpecRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_spec), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = common_fields.Spec() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_spec( - parent="parent_value", - spec=common_fields.Spec(name="name_value"), - spec_id="spec_id_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].spec - mock_val = common_fields.Spec(name="name_value") - assert arg == mock_val - arg = args[0].spec_id - mock_val = "spec_id_value" - assert arg == mock_val - - -def test_create_spec_flattened_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + request = request_type(**request_init) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_spec( - apihub_service.CreateSpecRequest(), - parent="parent_value", - spec=common_fields.Spec(name="name_value"), - spec_id="spec_id_value", - ) + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_spec(request) -@pytest.mark.asyncio -async def test_create_spec_flattened_async(): - client = ApiHubAsyncClient( +def test_create_spec_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Spec() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Spec() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common_fields.Spec()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.create_spec( + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", spec=common_fields.Spec(name="name_value"), spec_id="spec_id_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Spec.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_spec(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].spec - mock_val = common_fields.Spec(name="name_value") - assert arg == mock_val - arg = args[0].spec_id - mock_val = "spec_id_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_create_spec_flattened_error_async(): - client = ApiHubAsyncClient( + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/apis/*/versions/*}/specs" + % client.transport._host, + args[1], + ) + + +def test_create_spec_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.create_spec( + client.create_spec( apihub_service.CreateSpecRequest(), parent="parent_value", spec=common_fields.Spec(name="name_value"), @@ -5664,6 +4975,12 @@ async def test_create_spec_flattened_error_async(): ) +def test_create_spec_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -5671,32 +4988,38 @@ async def test_create_spec_flattened_error_async(): dict, ], ) -def test_get_spec(request_type, transport: str = "grpc"): +def test_get_spec_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Spec( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Spec( name="name_value", display_name="display_name_value", source_uri="source_uri_value", parsing_mode=common_fields.Spec.ParsingMode.RELAXED, ) - response = client.get_spec(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.GetSpecRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Spec.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_spec(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.Spec) @@ -5706,60 +5029,13 @@ def test_get_spec(request_type, transport: str = "grpc"): assert response.parsing_mode == common_fields.Spec.ParsingMode.RELAXED -def test_get_spec_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_spec), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_spec() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetSpecRequest() - - -def test_get_spec_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = apihub_service.GetSpecRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_spec), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_spec(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetSpecRequest( - name="name_value", - ) - - -def test_get_spec_use_cached_wrapped_rpc(): +def test_get_spec_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -5775,6 +5051,7 @@ def test_get_spec_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.get_spec] = mock_rpc + request = {} client.get_spec(request) @@ -5788,256 +5065,229 @@ def test_get_spec_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_spec_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_spec), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Spec( - name="name_value", - display_name="display_name_value", - source_uri="source_uri_value", - parsing_mode=common_fields.Spec.ParsingMode.RELAXED, - ) - ) - response = await client.get_spec() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetSpecRequest() - +def test_get_spec_rest_required_fields(request_type=apihub_service.GetSpecRequest): + transport_class = transports.ApiHubRestTransport -@pytest.mark.asyncio -async def test_get_spec_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # verify fields with default values are dropped - # Ensure method has been cached - assert ( - client._client._transport.get_spec - in client._client._transport._wrapped_methods - ) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_spec._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_spec - ] = mock_rpc + # verify required fields with default values are now present - request = {} - await client.get_spec(request) + 
jsonified_request["name"] = "name_value" - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_spec._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - await client.get_spec(request) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_get_spec_async( - transport: str = "grpc_asyncio", request_type=apihub_service.GetSpecRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Spec() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Spec( - name="name_value", - display_name="display_name_value", - source_uri="source_uri_value", - parsing_mode=common_fields.Spec.ParsingMode.RELAXED, - ) - ) - response = await client.get_spec(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.GetSpecRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Spec.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.Spec) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.source_uri == "source_uri_value" - assert response.parsing_mode == common_fields.Spec.ParsingMode.RELAXED + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_spec(request) -@pytest.mark.asyncio -async def test_get_spec_async_from_dict(): - await test_get_spec_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_get_spec_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_spec_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = apihub_service.GetSpecRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_spec), "__call__") as call: - call.return_value = common_fields.Spec() - client.get_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.get_spec._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.asyncio -async def test_get_spec_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_spec_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_get_spec" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_get_spec" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.GetSpecRequest.pb(apihub_service.GetSpecRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = apihub_service.GetSpecRequest() - - request.name = "name_value" + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Spec.to_json(common_fields.Spec()) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_spec), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common_fields.Spec()) - await client.get_spec(request) + request = apihub_service.GetSpecRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Spec() - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + client.get_spec( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + pre.assert_called_once() + post.assert_called_once() -def test_get_spec_flattened(): +def test_get_spec_rest_bad_request( + transport: str = "rest", request_type=apihub_service.GetSpecRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Spec() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.get_spec( - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } + request = request_type(**request_init) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_spec(request) -def test_get_spec_flattened_error(): +def test_get_spec_rest_flattened(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_spec( - apihub_service.GetSpecRequest(), - name="name_value", - ) - - -@pytest.mark.asyncio -async def test_get_spec_flattened_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Spec() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_spec), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = common_fields.Spec() + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common_fields.Spec()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_spec( + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Spec.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_spec(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/apis/*/versions/*/specs/*}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_get_spec_flattened_error_async(): - client = ApiHubAsyncClient( +def test_get_spec_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_spec( + client.get_spec( apihub_service.GetSpecRequest(), name="name_value", ) +def test_get_spec_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -6045,32 +5295,36 @@ async def test_get_spec_flattened_error_async(): dict, ], ) -def test_get_spec_contents(request_type, transport: str = "grpc"): +def test_get_spec_contents_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_spec_contents), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.SpecContents( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.SpecContents( contents=b"contents_blob", mime_type="mime_type_value", ) - response = client.get_spec_contents(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.GetSpecContentsRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.SpecContents.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_spec_contents(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.SpecContents) @@ -6078,64 +5332,13 @@ def test_get_spec_contents(request_type, transport: str = "grpc"): assert response.mime_type == "mime_type_value" -def test_get_spec_contents_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_spec_contents), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_spec_contents() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetSpecContentsRequest() - - -def test_get_spec_contents_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.GetSpecContentsRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_spec_contents), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_spec_contents(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetSpecContentsRequest( - name="name_value", - ) - - -def test_get_spec_contents_use_cached_wrapped_rpc(): +def test_get_spec_contents_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -6153,6 +5356,7 @@ def test_get_spec_contents_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.get_spec_contents ] = mock_rpc + request = {} client.get_spec_contents(request) @@ -6166,268 +5370,235 @@ def test_get_spec_contents_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_spec_contents_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", +def test_get_spec_contents_rest_required_fields( + request_type=apihub_service.GetSpecContentsRequest, +): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_spec_contents), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.SpecContents( - contents=b"contents_blob", - mime_type="mime_type_value", - ) - ) - response = await client.get_spec_contents() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetSpecContentsRequest() + # verify fields with default values are dropped + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_spec_contents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) -@pytest.mark.asyncio -async def test_get_spec_contents_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.get_spec_contents - in client._client._transport._wrapped_methods - ) - - # 
Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_spec_contents - ] = mock_rpc - - request = {} - await client.get_spec_contents(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.get_spec_contents(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_spec_contents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_get_spec_contents_async( - transport: str = "grpc_asyncio", request_type=apihub_service.GetSpecContentsRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.SpecContents() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_spec_contents), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.SpecContents( - contents=b"contents_blob", - mime_type="mime_type_value", - ) - ) - response = await client.get_spec_contents(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.GetSpecContentsRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.SpecContents.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.SpecContents) - assert response.contents == b"contents_blob" - assert response.mime_type == "mime_type_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_spec_contents(request) -@pytest.mark.asyncio -async def test_get_spec_contents_async_from_dict(): - await test_get_spec_contents_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_get_spec_contents_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_spec_contents_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetSpecContentsRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_spec_contents), "__call__" - ) as call: - call.return_value = common_fields.SpecContents() - client.get_spec_contents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.get_spec_contents._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.asyncio -async def test_get_spec_contents_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_spec_contents_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetSpecContentsRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. + client = ApiHubClient(transport=transport) with mock.patch.object( - type(client.transport.get_spec_contents), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.SpecContents() + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_get_spec_contents" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_get_spec_contents" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.GetSpecContentsRequest.pb( + apihub_service.GetSpecContentsRequest() ) - await client.get_spec_contents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.SpecContents.to_json( + common_fields.SpecContents() + ) -def test_get_spec_contents_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.GetSpecContentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.SpecContents() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_spec_contents), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.SpecContents() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.get_spec_contents( - name="name_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_get_spec_contents_flattened_error(): +def test_get_spec_contents_rest_bad_request( + transport: str = "rest", request_type=apihub_service.GetSpecContentsRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_spec_contents( - apihub_service.GetSpecContentsRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_spec_contents(request) -@pytest.mark.asyncio -async def test_get_spec_contents_flattened_async(): - client = ApiHubAsyncClient( +def test_get_spec_contents_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_spec_contents), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.SpecContents() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.SpecContents() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.SpecContents() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.get_spec_contents( + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.SpecContents.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_spec_contents(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/apis/*/versions/*/specs/*}:contents" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_get_spec_contents_flattened_error_async(): - client = ApiHubAsyncClient( +def test_get_spec_contents_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_spec_contents( + client.get_spec_contents( apihub_service.GetSpecContentsRequest(), name="name_value", ) +def test_get_spec_contents_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -6435,93 +5606,48 @@ async def test_get_spec_contents_flattened_error_async(): dict, ], ) -def test_list_specs(request_type, transport: str = "grpc"): +def test_list_specs_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_specs), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListSpecsResponse( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListSpecsResponse( next_page_token="next_page_token_value", ) - response = client.list_specs(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.ListSpecsRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListSpecsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_specs(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListSpecsPager) assert response.next_page_token == "next_page_token_value" -def test_list_specs_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_specs), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_specs() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListSpecsRequest() - - -def test_list_specs_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = apihub_service.ListSpecsRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_specs), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_specs(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListSpecsRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", - ) - - -def test_list_specs_use_cached_wrapped_rpc(): +def test_list_specs_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -6537,6 +5663,7 @@ def test_list_specs_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.list_specs] = mock_rpc + request = {} client.list_specs(request) @@ -6550,196 +5677,233 @@ def test_list_specs_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_specs_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", +def test_list_specs_rest_required_fields(request_type=apihub_service.ListSpecsRequest): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_specs), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListSpecsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_specs() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListSpecsRequest() + # verify fields with default values are dropped + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_specs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) -@pytest.mark.asyncio -async def test_list_specs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # verify required fields with default values are now present - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + jsonified_request["parent"] = "parent_value" - # Ensure method has been cached - assert ( - client._client._transport.list_specs - in 
client._client._transport._wrapped_methods + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_specs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", ) + ) + jsonified_request.update(unset_fields) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.list_specs - ] = mock_rpc - - request = {} - await client.list_specs(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" - await client.list_specs(request) + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListSpecsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + response_value = Response() + response_value.status_code = 200 -@pytest.mark.asyncio -async def test_list_specs_async( - transport: str = "grpc_asyncio", request_type=apihub_service.ListSpecsRequest -): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # Convert return value to protobuf type + return_value = apihub_service.ListSpecsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_specs), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListSpecsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_specs(request) + response = client.list_specs(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.ListSpecsRequest() - assert args[0] == request + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListSpecsAsyncPager) - assert response.next_page_token == "next_page_token_value" +def test_list_specs_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) -@pytest.mark.asyncio -async def test_list_specs_async_from_dict(): - await test_list_specs_async(request_type=dict) + unset_fields = transport.list_specs._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) -def test_list_specs_field_headers(): - client = ApiHubClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_specs_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_list_specs" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_list_specs" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.ListSpecsRequest.pb( + apihub_service.ListSpecsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = apihub_service.ListSpecsRequest() - - request.parent = "parent_value" + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = apihub_service.ListSpecsResponse.to_json( + apihub_service.ListSpecsResponse() + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_specs), "__call__") as call: - call.return_value = apihub_service.ListSpecsResponse() - client.list_specs(request) + request = apihub_service.ListSpecsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = apihub_service.ListSpecsResponse() - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + client.list_specs( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + pre.assert_called_once() + post.assert_called_once() -@pytest.mark.asyncio -async def test_list_specs_field_headers_async(): - client = ApiHubAsyncClient( +def test_list_specs_rest_bad_request( + transport: str = "rest", request_type=apihub_service.ListSpecsRequest +): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.ListSpecsRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_specs), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListSpecsResponse() - ) - await client.list_specs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + request = request_type(**request_init) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_specs(request) -def test_list_specs_flattened(): +def test_list_specs_rest_flattened(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_specs), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListSpecsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_specs( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = apihub_service.ListSpecsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListSpecsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_specs(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/apis/*/versions/*}/specs" + % client.transport._host, + args[1], + ) -def test_list_specs_flattened_error(): +def test_list_specs_rest_flattened_error(transport: str = "rest"): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -6751,60 +5915,18 @@ def test_list_specs_flattened_error(): ) -@pytest.mark.asyncio -async def test_list_specs_flattened_async(): - client = ApiHubAsyncClient( +def test_list_specs_rest_pager(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_specs), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListSpecsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListSpecsResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_specs( - parent="parent_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_list_specs_flattened_error_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_specs( - apihub_service.ListSpecsRequest(), - parent="parent_value", - ) - - -def test_list_specs_pager(transport_name: str = "grpc"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_specs), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( apihub_service.ListSpecsResponse( specs=[ common_fields.Spec(), @@ -6829,162 +5951,29 @@ def test_list_specs_pager(transport_name: str = "grpc"): common_fields.Spec(), ], ), - RuntimeError, ) + # Two responses for two calls + response = response + response - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_specs(request={}, retry=retry, timeout=timeout) + # Wrap the values into proper Response objs + response = tuple(apihub_service.ListSpecsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + sample_request = { + "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + + pager = client.list_specs(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, common_fields.Spec) for i in results) - -def test_list_specs_pages(transport_name: str = "grpc"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_specs), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.ListSpecsResponse( - specs=[ - common_fields.Spec(), - common_fields.Spec(), - common_fields.Spec(), - ], - next_page_token="abc", - ), - apihub_service.ListSpecsResponse( - specs=[], - next_page_token="def", - ), - apihub_service.ListSpecsResponse( - specs=[ - common_fields.Spec(), - ], - next_page_token="ghi", - ), - apihub_service.ListSpecsResponse( - specs=[ - common_fields.Spec(), - common_fields.Spec(), - ], - ), - RuntimeError, - ) - pages = list(client.list_specs(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_specs_async_pager(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_specs), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.ListSpecsResponse( - specs=[ - common_fields.Spec(), - common_fields.Spec(), - common_fields.Spec(), - ], - next_page_token="abc", - ), - apihub_service.ListSpecsResponse( - specs=[], - next_page_token="def", - ), - apihub_service.ListSpecsResponse( - specs=[ - common_fields.Spec(), - ], - next_page_token="ghi", - ), - apihub_service.ListSpecsResponse( - specs=[ - common_fields.Spec(), - common_fields.Spec(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_specs( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, common_fields.Spec) for i in responses) - - -@pytest.mark.asyncio -async def test_list_specs_async_pages(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_specs), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.ListSpecsResponse( - specs=[ - common_fields.Spec(), - common_fields.Spec(), - common_fields.Spec(), - ], - next_page_token="abc", - ), - apihub_service.ListSpecsResponse( - specs=[], - next_page_token="def", - ), - apihub_service.ListSpecsResponse( - specs=[ - common_fields.Spec(), - ], - next_page_token="ghi", - ), - apihub_service.ListSpecsResponse( - specs=[ - common_fields.Spec(), - common_fields.Spec(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_specs(request={}) - ).pages: - pages.append(page_) + pages = list(client.list_specs(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -6996,91 +5985,173 @@ async def test_list_specs_async_pages(): dict, ], ) -def test_update_spec(request_type, transport: str = "grpc"): +def test_update_spec_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Spec( - name="name_value", - display_name="display_name_value", - source_uri="source_uri_value", - parsing_mode=common_fields.Spec.ParsingMode.RELAXED, - ) - response = client.update_spec(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.UpdateSpecRequest() - assert args[0] == request + # send a request that will satisfy transcoding + request_init = { + "spec": { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } + } + request_init["spec"] = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5", + "display_name": "display_name_value", + "spec_type": { + "enum_values": { + "values": [ + { + "id": "id_value", + "display_name": "display_name_value", + "description": "description_value", + "immutable": True, + } + ] + }, + "string_values": {"values": ["values_value1", "values_value2"]}, + "json_values": {}, + "attribute": "attribute_value", + }, + "contents": {"contents": b"contents_blob", "mime_type": "mime_type_value"}, + "details": { + "open_api_spec_details": { + "format_": 1, + "version": "version_value", + "owner": {"display_name": "display_name_value", "email": "email_value"}, + }, + "description": "description_value", + }, + "source_uri": "source_uri_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "lint_response": { + "issues": [ + { + "code": "code_value", + "path": ["path_value1", "path_value2"], + "message": "message_value", + "severity": 1, + "range_": {"start": {"line": 424, "character": 941}, "end": {}}, + } + ], + "summary": [{"severity": 1, "count": 553}], + "state": 1, + "source": "source_value", + "linter": 1, + "create_time": {}, + }, + "attributes": {}, + "documentation": {"external_uri": "external_uri_value"}, + "parsing_mode": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Spec) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.source_uri == "source_uri_value" - assert response.parsing_mode == common_fields.Spec.ParsingMode.RELAXED + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.UpdateSpecRequest.meta.fields["spec"] + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] -def test_update_spec_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_spec), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.update_spec() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateSpecRequest() + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] -def test_update_spec_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + subfields_not_in_runtime = [] - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.UpdateSpecRequest() + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["spec"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_spec), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.update_spec(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateSpecRequest() + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["spec"][field])): + del request_init["spec"][field][i][subfield] + else: + del request_init["spec"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Spec( + name="name_value", + display_name="display_name_value", + source_uri="source_uri_value", + parsing_mode=common_fields.Spec.ParsingMode.RELAXED, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Spec.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_spec(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, common_fields.Spec) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.source_uri == "source_uri_value" + assert response.parsing_mode == common_fields.Spec.ParsingMode.RELAXED -def test_update_spec_use_cached_wrapped_rpc(): +def test_update_spec_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -7096,6 +6167,7 @@ def test_update_spec_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.update_spec] = mock_rpc + request = {} client.update_spec(request) @@ -7109,268 +6181,245 @@ def test_update_spec_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_spec_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_spec), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Spec( - name="name_value", - display_name="display_name_value", - source_uri="source_uri_value", - parsing_mode=common_fields.Spec.ParsingMode.RELAXED, - ) - ) - response = await client.update_spec() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateSpecRequest() - - -@pytest.mark.asyncio -async def test_update_spec_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_update_spec_rest_required_fields( + request_type=apihub_service.UpdateSpecRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.update_spec - in client._client._transport._wrapped_methods - ) + transport_class = transports.ApiHubRestTransport - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.update_spec - ] = mock_rpc + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - request = {} - await client.update_spec(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_spec._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - await client.update_spec(request) + # verify required fields with default values are now present - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_spec._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone -@pytest.mark.asyncio -async def test_update_spec_async( - transport: str = "grpc_asyncio", request_type=apihub_service.UpdateSpecRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Spec() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Spec( - name="name_value", - display_name="display_name_value", - source_uri="source_uri_value", - parsing_mode=common_fields.Spec.ParsingMode.RELAXED, - ) - ) - response = await client.update_spec(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.UpdateSpecRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Spec.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Spec) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.source_uri == "source_uri_value" - assert response.parsing_mode == common_fields.Spec.ParsingMode.RELAXED + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_spec(request) -@pytest.mark.asyncio -async def test_update_spec_async_from_dict(): - await test_update_spec_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_update_spec_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_update_spec_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.UpdateSpecRequest() - - request.spec.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_spec), "__call__") as call: - call.return_value = common_fields.Spec() - client.update_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "spec.name=name_value", - ) in kw["metadata"] + unset_fields = transport.update_spec._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "spec", + "updateMask", + ) + ) + ) -@pytest.mark.asyncio -async def test_update_spec_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_spec_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_update_spec" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_update_spec" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.UpdateSpecRequest.pb( + apihub_service.UpdateSpecRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.UpdateSpecRequest() - - request.spec.name = "name_value" + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Spec.to_json(common_fields.Spec()) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_spec), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common_fields.Spec()) - await client.update_spec(request) + request = apihub_service.UpdateSpecRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Spec() - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + client.update_spec( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "spec.name=name_value", - ) in kw["metadata"] + pre.assert_called_once() + post.assert_called_once() -def test_update_spec_flattened(): +def test_update_spec_rest_bad_request( + transport: str = "rest", request_type=apihub_service.UpdateSpecRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Spec() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_spec( - spec=common_fields.Spec(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) + # send a request that will satisfy transcoding + request_init = { + "spec": { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } + } + request = request_type(**request_init) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].spec - mock_val = common_fields.Spec(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_spec(request) -def test_update_spec_flattened_error(): +def test_update_spec_rest_flattened(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_spec( - apihub_service.UpdateSpecRequest(), - spec=common_fields.Spec(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - -@pytest.mark.asyncio -async def test_update_spec_flattened_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Spec() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_spec), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = common_fields.Spec() + # get arguments that satisfy an http rule for this method + sample_request = { + "spec": { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } + } - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common_fields.Spec()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_spec( + # get truthy value for each flattened field + mock_args = dict( spec=common_fields.Spec(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Spec.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_spec(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].spec - mock_val = common_fields.Spec(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_update_spec_flattened_error_async(): - client = ApiHubAsyncClient( + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{spec.name=projects/*/locations/*/apis/*/versions/*/specs/*}" + % client.transport._host, + args[1], + ) + + +def test_update_spec_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_spec( + client.update_spec( apihub_service.UpdateSpecRequest(), spec=common_fields.Spec(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) +def test_update_spec_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -7378,86 +6427,43 @@ async def test_update_spec_flattened_error_async(): dict, ], ) -def test_delete_spec(request_type, transport: str = "grpc"): +def test_delete_spec_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_spec(request) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.DeleteSpecRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_spec(request) # Establish that the response is the type that we expect. assert response is None -def test_delete_spec_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_spec), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.delete_spec() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteSpecRequest() - - -def test_delete_spec_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.DeleteSpecRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_spec), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_spec(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteSpecRequest( - name="name_value", - ) - - -def test_delete_spec_use_cached_wrapped_rpc(): +def test_delete_spec_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -7473,6 +6479,7 @@ def test_delete_spec_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[client._transport.delete_spec] = mock_rpc + request = {} client.delete_spec(request) @@ -7486,240 +6493,222 @@ def test_delete_spec_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_spec_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_spec() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteSpecRequest() - - -@pytest.mark.asyncio -async def test_delete_spec_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_delete_spec_rest_required_fields( + request_type=apihub_service.DeleteSpecRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.delete_spec - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.delete_spec - ] = mock_rpc - - request = {} - 
await client.delete_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.delete_spec(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - + transport_class = transports.ApiHubRestTransport -@pytest.mark.asyncio -async def test_delete_spec_async( - transport: str = "grpc_asyncio", request_type=apihub_service.DeleteSpecRequest -): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_spec(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.DeleteSpecRequest() - assert args[0] == request + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_spec._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the response is the type that we expect. 
- assert response is None + # verify required fields with default values are now present + jsonified_request["name"] = "name_value" -@pytest.mark.asyncio -async def test_delete_spec_async_from_dict(): - await test_delete_spec_async(request_type=dict) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_spec._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -def test_delete_spec_field_headers(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request = request_type(**request_init) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.DeleteSpecRequest() + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result - request.name = "name_value" + response_value = Response() + response_value.status_code = 200 + json_return_value = "" - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_spec), "__call__") as call: - call.return_value = None - client.delete_spec(request) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.delete_spec(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -@pytest.mark.asyncio -async def test_delete_spec_field_headers_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_delete_spec_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.DeleteSpecRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_spec), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.delete_spec._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_delete_spec_flattened(): - client = ApiHubClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_spec_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_delete_spec" + ) as pre: + pre.assert_not_called() + pb_message = apihub_service.DeleteSpecRequest.pb( + apihub_service.DeleteSpecRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = apihub_service.DeleteSpecRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.delete_spec( - name="name_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + pre.assert_called_once() -def test_delete_spec_flattened_error(): +def test_delete_spec_rest_bad_request( + transport: str = "rest", request_type=apihub_service.DeleteSpecRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_spec( - apihub_service.DeleteSpecRequest(), - name="name_value", - ) - - -@pytest.mark.asyncio -async def test_delete_spec_flattened_async(): - client = ApiHubAsyncClient( + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_spec(request) + + +def test_delete_spec_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" + } - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_spec( + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_spec(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/apis/*/versions/*/specs/*}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_delete_spec_flattened_error_async(): - client = ApiHubAsyncClient( +def test_delete_spec_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.delete_spec( + client.delete_spec( apihub_service.DeleteSpecRequest(), name="name_value", ) +def test_delete_spec_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -7727,32 +6716,36 @@ async def test_delete_spec_flattened_error_async(): dict, ], ) -def test_get_api_operation(request_type, transport: str = "grpc"): +def test_get_api_operation_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/operations/sample5" + } + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_operation), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.ApiOperation( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.ApiOperation( name="name_value", spec="spec_value", ) - response = client.get_api_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.GetApiOperationRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.ApiOperation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_api_operation(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.ApiOperation) @@ -7760,64 +6753,13 @@ def test_get_api_operation(request_type, transport: str = "grpc"): assert response.spec == "spec_value" -def test_get_api_operation_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_operation), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_api_operation() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetApiOperationRequest() - - -def test_get_api_operation_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.GetApiOperationRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_operation), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_api_operation(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetApiOperationRequest( - name="name_value", - ) - - -def test_get_api_operation_use_cached_wrapped_rpc(): +def test_get_api_operation_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -7835,6 +6777,7 @@ def test_get_api_operation_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.get_api_operation ] = mock_rpc + request = {} client.get_api_operation(request) @@ -7848,268 +6791,235 @@ def test_get_api_operation_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_api_operation_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_operation), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ApiOperation( - name="name_value", - spec="spec_value", - ) - ) - response = await client.get_api_operation() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetApiOperationRequest() - - -@pytest.mark.asyncio -async def test_get_api_operation_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_get_api_operation_rest_required_fields( + request_type=apihub_service.GetApiOperationRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ApiHubRestTransport - # Ensure method has been cached - assert ( - client._client._transport.get_api_operation - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_api_operation - ] = mock_rpc + # verify fields with default values are 
dropped - request = {} - await client.get_api_operation(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_api_operation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.get_api_operation(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_api_operation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_get_api_operation_async( - transport: str = "grpc_asyncio", request_type=apihub_service.GetApiOperationRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.ApiOperation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_operation), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ApiOperation( - name="name_value", - spec="spec_value", - ) - ) - response = await client.get_api_operation(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.GetApiOperationRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.ApiOperation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.ApiOperation) - assert response.name == "name_value" - assert response.spec == "spec_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_api_operation(request) -@pytest.mark.asyncio -async def test_get_api_operation_async_from_dict(): - await test_get_api_operation_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_get_api_operation_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_api_operation_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetApiOperationRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_operation), "__call__" - ) as call: - call.return_value = common_fields.ApiOperation() - client.get_api_operation(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.get_api_operation._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.asyncio -async def test_get_api_operation_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_api_operation_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetApiOperationRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. + client = ApiHubClient(transport=transport) with mock.patch.object( - type(client.transport.get_api_operation), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ApiOperation() + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_get_api_operation" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_get_api_operation" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.GetApiOperationRequest.pb( + apihub_service.GetApiOperationRequest() ) - await client.get_api_operation(request) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.ApiOperation.to_json( + common_fields.ApiOperation() + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -def test_get_api_operation_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.GetApiOperationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.ApiOperation() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_operation), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.ApiOperation() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.get_api_operation( - name="name_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_get_api_operation_flattened_error(): +def test_get_api_operation_rest_bad_request( + transport: str = "rest", request_type=apihub_service.GetApiOperationRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_api_operation( - apihub_service.GetApiOperationRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/operations/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_api_operation(request) -@pytest.mark.asyncio -async def test_get_api_operation_flattened_async(): - client = ApiHubAsyncClient( +def test_get_api_operation_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_operation), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.ApiOperation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.ApiOperation() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ApiOperation() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.get_api_operation( + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/operations/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.ApiOperation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_api_operation(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/apis/*/versions/*/operations/*}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_get_api_operation_flattened_error_async(): - client = ApiHubAsyncClient( +def test_get_api_operation_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_api_operation( + client.get_api_operation( apihub_service.GetApiOperationRequest(), name="name_value", ) +def test_get_api_operation_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -8117,99 +7027,48 @@ async def test_get_api_operation_flattened_error_async(): dict, ], ) -def test_list_api_operations(request_type, transport: str = "grpc"): +def test_list_api_operations_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_api_operations), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListApiOperationsResponse( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListApiOperationsResponse( next_page_token="next_page_token_value", ) - response = client.list_api_operations(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.ListApiOperationsRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListApiOperationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_api_operations(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListApiOperationsPager) assert response.next_page_token == "next_page_token_value" -def test_list_api_operations_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_api_operations), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_api_operations() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListApiOperationsRequest() - - -def test_list_api_operations_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = apihub_service.ListApiOperationsRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_api_operations), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_api_operations(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListApiOperationsRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", - ) - - -def test_list_api_operations_use_cached_wrapped_rpc(): +def test_list_api_operations_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -8229,6 +7088,7 @@ def test_list_api_operations_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.list_api_operations ] = mock_rpc + request = {} client.list_api_operations(request) @@ -8242,278 +7102,258 @@ def test_list_api_operations_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_api_operations_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_api_operations), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListApiOperationsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_api_operations() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListApiOperationsRequest() - - -@pytest.mark.asyncio -async def test_list_api_operations_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_list_api_operations_rest_required_fields( + request_type=apihub_service.ListApiOperationsRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ApiHubRestTransport - # Ensure method has been cached - assert ( - client._client._transport.list_api_operations - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.list_api_operations - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.list_api_operations(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + 
).list_api_operations._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.list_api_operations(request) + jsonified_request["parent"] = "parent_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_api_operations._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -@pytest.mark.asyncio -async def test_list_api_operations_async( - transport: str = "grpc_asyncio", - request_type=apihub_service.ListApiOperationsRequest, -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_api_operations), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListApiOperationsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_api_operations(request) + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListApiOperationsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.ListApiOperationsRequest() - assert args[0] == request + response_value = Response() + response_value.status_code = 200 - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListApiOperationsAsyncPager) - assert response.next_page_token == "next_page_token_value" + # Convert return value to protobuf type + return_value = apihub_service.ListApiOperationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -@pytest.mark.asyncio -async def test_list_api_operations_async_from_dict(): - await test_list_api_operations_async(request_type=dict) + response = client.list_api_operations(request) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_list_api_operations_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.ListApiOperationsRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_api_operations), "__call__" - ) as call: - call.return_value = apihub_service.ListApiOperationsResponse() - client.list_api_operations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_list_api_operations_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + unset_fields = transport.list_api_operations._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) -@pytest.mark.asyncio -async def test_list_api_operations_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_api_operations_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.ListApiOperationsRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. + client = ApiHubClient(transport=transport) with mock.patch.object( - type(client.transport.list_api_operations), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListApiOperationsResponse() + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_list_api_operations" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_list_api_operations" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.ListApiOperationsRequest.pb( + apihub_service.ListApiOperationsRequest() ) - await client.list_api_operations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = apihub_service.ListApiOperationsResponse.to_json( + apihub_service.ListApiOperationsResponse() + ) -def test_list_api_operations_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.ListApiOperationsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = apihub_service.ListApiOperationsResponse() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_api_operations), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListApiOperationsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.list_api_operations( - parent="parent_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_list_api_operations_flattened_error(): +def test_list_api_operations_rest_bad_request( + transport: str = "rest", request_type=apihub_service.ListApiOperationsRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_api_operations( - apihub_service.ListApiOperationsRequest(), - parent="parent_value", - ) + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_api_operations(request) -@pytest.mark.asyncio -async def test_list_api_operations_flattened_async(): - client = ApiHubAsyncClient( + +def test_list_api_operations_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_api_operations), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListApiOperationsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListApiOperationsResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListApiOperationsResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_api_operations( + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListApiOperationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_api_operations(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/apis/*/versions/*}/operations" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_list_api_operations_flattened_error_async(): - client = ApiHubAsyncClient( +def test_list_api_operations_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_api_operations( + client.list_api_operations( apihub_service.ListApiOperationsRequest(), parent="parent_value", ) -def test_list_api_operations_pager(transport_name: str = "grpc"): +def test_list_api_operations_rest_pager(transport: str = "rest"): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_api_operations), "__call__" - ) as call: - # Set the response to a series of pages. - call.side_effect = ( + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( apihub_service.ListApiOperationsResponse( api_operations=[ common_fields.ApiOperation(), @@ -8538,205 +7378,74 @@ def test_list_api_operations_pager(transport_name: str = "grpc"): common_fields.ApiOperation(), ], ), - RuntimeError, ) + # Two responses for two calls + response = response + response - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Wrap the values into proper Response objs + response = tuple( + apihub_service.ListApiOperationsResponse.to_json(x) for x in response ) - pager = client.list_api_operations(request={}, retry=retry, timeout=timeout) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" + } - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + pager = client.list_api_operations(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, common_fields.ApiOperation) for i in results) - -def test_list_api_operations_pages(transport_name: str = "grpc"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_api_operations), "__call__" - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.ListApiOperationsResponse( - api_operations=[ - common_fields.ApiOperation(), - common_fields.ApiOperation(), - common_fields.ApiOperation(), - ], - next_page_token="abc", - ), - apihub_service.ListApiOperationsResponse( - api_operations=[], - next_page_token="def", - ), - apihub_service.ListApiOperationsResponse( - api_operations=[ - common_fields.ApiOperation(), - ], - next_page_token="ghi", - ), - apihub_service.ListApiOperationsResponse( - api_operations=[ - common_fields.ApiOperation(), - common_fields.ApiOperation(), - ], - ), - RuntimeError, - ) - pages = list(client.list_api_operations(request={}).pages) + pages = list(client.list_api_operations(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.asyncio -async def test_list_api_operations_async_pager(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.GetDefinitionRequest, + dict, + ], +) +def test_get_definition_rest(request_type): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_api_operations), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.ListApiOperationsResponse( - api_operations=[ - common_fields.ApiOperation(), - common_fields.ApiOperation(), - common_fields.ApiOperation(), - ], - next_page_token="abc", - ), - apihub_service.ListApiOperationsResponse( - api_operations=[], - next_page_token="def", - ), - apihub_service.ListApiOperationsResponse( - api_operations=[ - common_fields.ApiOperation(), - ], - next_page_token="ghi", - ), - apihub_service.ListApiOperationsResponse( - api_operations=[ - common_fields.ApiOperation(), - common_fields.ApiOperation(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_api_operations( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, common_fields.ApiOperation) for i in responses) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/definitions/sample5" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Definition( + name="name_value", + spec="spec_value", + type_=common_fields.Definition.Type.SCHEMA, + ) -@pytest.mark.asyncio -async def test_list_api_operations_async_pages(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_api_operations), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.ListApiOperationsResponse( - api_operations=[ - common_fields.ApiOperation(), - common_fields.ApiOperation(), - common_fields.ApiOperation(), - ], - next_page_token="abc", - ), - apihub_service.ListApiOperationsResponse( - api_operations=[], - next_page_token="def", - ), - apihub_service.ListApiOperationsResponse( - api_operations=[ - common_fields.ApiOperation(), - ], - next_page_token="ghi", - ), - apihub_service.ListApiOperationsResponse( - api_operations=[ - common_fields.ApiOperation(), - common_fields.ApiOperation(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_api_operations(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.GetDefinitionRequest, - dict, - ], -) -def test_get_definition(request_type, transport: str = "grpc"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Definition.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_definition), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = common_fields.Definition( - name="name_value", - spec="spec_value", - type_=common_fields.Definition.Type.SCHEMA, - ) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value response = client.get_definition(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.GetDefinitionRequest() - assert args[0] == request - # Establish that the response is the type that we expect. assert isinstance(response, common_fields.Definition) assert response.name == "name_value" @@ -8744,60 +7453,13 @@ def test_get_definition(request_type, transport: str = "grpc"): assert response.type_ == common_fields.Definition.Type.SCHEMA -def test_get_definition_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_definition), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_definition() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetDefinitionRequest() - - -def test_get_definition_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = apihub_service.GetDefinitionRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_definition), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_definition(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetDefinitionRequest( - name="name_value", - ) - - -def test_get_definition_use_cached_wrapped_rpc(): +def test_get_definition_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -8813,6 +7475,7 @@ def test_get_definition_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.get_definition] = mock_rpc + request = {} client.get_definition(request) @@ -8826,259 +7489,235 @@ def test_get_definition_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_definition_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_definition), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Definition( - name="name_value", - spec="spec_value", - type_=common_fields.Definition.Type.SCHEMA, - ) - ) - response = await client.get_definition() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetDefinitionRequest() - - -@pytest.mark.asyncio -async def test_get_definition_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_get_definition_rest_required_fields( + request_type=apihub_service.GetDefinitionRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ApiHubRestTransport - # Ensure method has been cached - assert ( - client._client._transport.get_definition - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_definition - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.get_definition(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_definition._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.get_definition(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_definition._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_get_definition_async( - transport: str = "grpc_asyncio", request_type=apihub_service.GetDefinitionRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Definition() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_definition), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Definition( - name="name_value", - spec="spec_value", - type_=common_fields.Definition.Type.SCHEMA, - ) - ) - response = await client.get_definition(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.GetDefinitionRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Definition.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Definition) - assert response.name == "name_value" - assert response.spec == "spec_value" - assert response.type_ == common_fields.Definition.Type.SCHEMA + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_definition(request) -@pytest.mark.asyncio -async def test_get_definition_async_from_dict(): - await test_get_definition_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_get_definition_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_definition_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetDefinitionRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_definition), "__call__") as call: - call.return_value = common_fields.Definition() - client.get_definition(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.get_definition._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.asyncio -async def test_get_definition_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_definition_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_get_definition" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_get_definition" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.GetDefinitionRequest.pb( + apihub_service.GetDefinitionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetDefinitionRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_definition), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Definition.to_json( common_fields.Definition() ) - await client.get_definition(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -def test_get_definition_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.GetDefinitionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Definition() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_definition), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Definition() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.get_definition( - name="name_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_get_definition_flattened_error(): +def test_get_definition_rest_bad_request( + transport: str = "rest", request_type=apihub_service.GetDefinitionRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_definition( - apihub_service.GetDefinitionRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/definitions/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_definition(request) -@pytest.mark.asyncio -async def test_get_definition_flattened_async(): - client = ApiHubAsyncClient( +def test_get_definition_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_definition), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Definition() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Definition() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/definitions/sample5" + } - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Definition() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_definition( + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Definition.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_definition(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/apis/*/versions/*/definitions/*}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_get_definition_flattened_error_async(): - client = ApiHubAsyncClient( +def test_get_definition_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_definition( + client.get_definition( apihub_service.GetDefinitionRequest(), name="name_value", ) +def test_get_definition_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -9086,22 +7725,116 @@ async def test_get_definition_flattened_error_async(): dict, ], ) -def test_create_deployment(request_type, transport: str = "grpc"): +def test_create_deployment_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["deployment"] = { + "name": "name_value", + "display_name": "display_name_value", + "description": "description_value", + "documentation": {"external_uri": "external_uri_value"}, + "deployment_type": { + "enum_values": { + "values": [ + { + "id": "id_value", + "display_name": "display_name_value", + "description": "description_value", + "immutable": True, + } + ] + }, + "string_values": {"values": ["values_value1", "values_value2"]}, + "json_values": {}, + "attribute": "attribute_value", + }, + "resource_uri": "resource_uri_value", + "endpoints": ["endpoints_value1", "endpoints_value2"], + "api_versions": ["api_versions_value1", "api_versions_value2"], + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "slo": {}, + "environment": {}, + "attributes": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Deployment( + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.CreateDeploymentRequest.meta.fields["deployment"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["deployment"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if 
(field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["deployment"][field])): + del request_init["deployment"][field][i][subfield] + else: + del request_init["deployment"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Deployment( name="name_value", display_name="display_name_value", description="description_value", @@ -9109,13 +7842,17 @@ def test_create_deployment(request_type, transport: str = "grpc"): endpoints=["endpoints_value"], api_versions=["api_versions_value"], ) - response = client.create_deployment(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.CreateDeploymentRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_deployment(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.Deployment) @@ -9127,66 +7864,13 @@ def test_create_deployment(request_type, transport: str = "grpc"): assert response.api_versions == ["api_versions_value"] -def test_create_deployment_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deployment), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_deployment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateDeploymentRequest() - - -def test_create_deployment_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.CreateDeploymentRequest( - parent="parent_value", - deployment_id="deployment_id_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deployment), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_deployment(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateDeploymentRequest( - parent="parent_value", - deployment_id="deployment_id_value", - ) - - -def test_create_deployment_use_cached_wrapped_rpc(): +def test_create_deployment_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -9204,6 +7888,7 @@ def test_create_deployment_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.create_deployment ] = mock_rpc + request = {} client.create_deployment(request) @@ -9217,293 +7902,233 @@ def test_create_deployment_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_deployment_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", +def test_create_deployment_rest_required_fields( + request_type=apihub_service.CreateDeploymentRequest, +): + transport_class = transports.ApiHubRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Deployment( - name="name_value", - display_name="display_name_value", - description="description_value", - resource_uri="resource_uri_value", - endpoints=["endpoints_value"], - api_versions=["api_versions_value"], - ) - ) - response = await client.create_deployment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateDeploymentRequest() + # verify fields with default values are dropped + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) -@pytest.mark.asyncio -async def test_create_deployment_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # verify required fields with default values are now present - # Should wrap all calls on client creation - assert 
wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + jsonified_request["parent"] = "parent_value" - # Ensure method has been cached - assert ( - client._client._transport.create_deployment - in client._client._transport._wrapped_methods - ) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_deployment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("deployment_id",)) + jsonified_request.update(unset_fields) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.create_deployment - ] = mock_rpc + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" - request = {} - await client.create_deployment(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.create_deployment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_create_deployment_async( - transport: str = "grpc_asyncio", request_type=apihub_service.CreateDeploymentRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Deployment() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Deployment( - name="name_value", - display_name="display_name_value", - description="description_value", - resource_uri="resource_uri_value", - endpoints=["endpoints_value"], - api_versions=["api_versions_value"], - ) - ) - response = await client.create_deployment(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.CreateDeploymentRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Deployment) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.resource_uri == "resource_uri_value" - assert response.endpoints == ["endpoints_value"] - assert response.api_versions == ["api_versions_value"] + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_deployment(request) -@pytest.mark.asyncio -async def test_create_deployment_async_from_dict(): - await test_create_deployment_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_create_deployment_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_create_deployment_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.CreateDeploymentRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deployment), "__call__" - ) as call: - call.return_value = common_fields.Deployment() - client.create_deployment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + unset_fields = transport.create_deployment._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("deploymentId",)) + & set( + ( + "parent", + "deployment", + ) + ) + ) -@pytest.mark.asyncio -async def test_create_deployment_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_deployment_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.CreateDeploymentRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. + client = ApiHubClient(transport=transport) with mock.patch.object( - type(client.transport.create_deployment), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Deployment() + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_create_deployment" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_create_deployment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.CreateDeploymentRequest.pb( + apihub_service.CreateDeploymentRequest() ) - await client.create_deployment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Deployment.to_json( + common_fields.Deployment() + ) -def test_create_deployment_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.CreateDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Deployment() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Deployment() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.create_deployment( - parent="parent_value", - deployment=common_fields.Deployment(name="name_value"), - deployment_id="deployment_id_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].deployment - mock_val = common_fields.Deployment(name="name_value") - assert arg == mock_val - arg = args[0].deployment_id - mock_val = "deployment_id_value" - assert arg == mock_val - - -def test_create_deployment_flattened_error(): + pre.assert_called_once() + post.assert_called_once() + + +def test_create_deployment_rest_bad_request( + transport: str = "rest", request_type=apihub_service.CreateDeploymentRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_deployment( - apihub_service.CreateDeploymentRequest(), - parent="parent_value", - deployment=common_fields.Deployment(name="name_value"), - deployment_id="deployment_id_value", - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_deployment(request) -@pytest.mark.asyncio -async def test_create_deployment_flattened_async(): - client = ApiHubAsyncClient( +def test_create_deployment_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Deployment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Deployment() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Deployment() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_deployment( + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", deployment=common_fields.Deployment(name="name_value"), deployment_id="deployment_id_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_deployment(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].deployment - mock_val = common_fields.Deployment(name="name_value") - assert arg == mock_val - arg = args[0].deployment_id - mock_val = "deployment_id_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_create_deployment_flattened_error_async(): - client = ApiHubAsyncClient( + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/deployments" + % client.transport._host, + args[1], + ) + + +def test_create_deployment_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_deployment( + client.create_deployment( apihub_service.CreateDeploymentRequest(), parent="parent_value", deployment=common_fields.Deployment(name="name_value"), @@ -9511,6 +8136,12 @@ async def test_create_deployment_flattened_error_async(): ) +def test_create_deployment_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -9518,20 +8149,20 @@ async def test_create_deployment_flattened_error_async(): dict, ], ) -def test_get_deployment(request_type, transport: str = "grpc"): +def test_get_deployment_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_deployment), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Deployment( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Deployment( name="name_value", display_name="display_name_value", description="description_value", @@ -9539,13 +8170,17 @@ def test_get_deployment(request_type, transport: str = "grpc"): endpoints=["endpoints_value"], api_versions=["api_versions_value"], ) - response = client.get_deployment(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.GetDeploymentRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_deployment(request) # Establish that the response is the type that we expect. 
assert isinstance(response, common_fields.Deployment) @@ -9557,65 +8192,18 @@ def test_get_deployment(request_type, transport: str = "grpc"): assert response.api_versions == ["api_versions_value"] -def test_get_deployment_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_deployment), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. +def test_get_deployment_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - client.get_deployment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetDeploymentRequest() - -def test_get_deployment_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.GetDeploymentRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_deployment), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_deployment(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetDeploymentRequest( - name="name_value", - ) - - -def test_get_deployment_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() # Ensure method has been cached assert client._transport.get_deployment in client._transport._wrapped_methods @@ -9626,6 +8214,7 @@ def test_get_deployment_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.get_deployment] = mock_rpc + request = {} client.get_deployment(request) @@ -9639,362 +8228,280 @@ def test_get_deployment_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_deployment_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_deployment), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Deployment( - name="name_value", - display_name="display_name_value", - description="description_value", - resource_uri="resource_uri_value", - endpoints=["endpoints_value"], - api_versions=["api_versions_value"], - ) - ) - response = await client.get_deployment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetDeploymentRequest() - - -@pytest.mark.asyncio -async def test_get_deployment_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_get_deployment_rest_required_fields( + request_type=apihub_service.GetDeploymentRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ApiHubRestTransport - # Ensure method has been cached - assert ( - client._client._transport.get_deployment - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_deployment - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.get_deployment(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + 
).get_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.get_deployment(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_get_deployment_async( - transport: str = "grpc_asyncio", request_type=apihub_service.GetDeploymentRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Deployment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_deployment), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Deployment( - name="name_value", - display_name="display_name_value", - description="description_value", - resource_uri="resource_uri_value", - endpoints=["endpoints_value"], - api_versions=["api_versions_value"], - ) - ) - response = await client.get_deployment(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.GetDeploymentRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Deployment) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.resource_uri == "resource_uri_value" - assert response.endpoints == ["endpoints_value"] - assert response.api_versions == ["api_versions_value"] + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_deployment(request) -@pytest.mark.asyncio -async def test_get_deployment_async_from_dict(): - await test_get_deployment_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_get_deployment_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_deployment_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetDeploymentRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_deployment), "__call__") as call: - call.return_value = common_fields.Deployment() - client.get_deployment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.get_deployment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.asyncio -async def test_get_deployment_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_deployment_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_get_deployment" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_get_deployment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.GetDeploymentRequest.pb( + apihub_service.GetDeploymentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetDeploymentRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_deployment), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Deployment.to_json( common_fields.Deployment() ) - await client.get_deployment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - -def test_get_deployment_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.GetDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Deployment() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_deployment), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Deployment() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.get_deployment( - name="name_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_get_deployment_flattened_error(): +def test_get_deployment_rest_bad_request( + transport: str = "rest", request_type=apihub_service.GetDeploymentRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_deployment( - apihub_service.GetDeploymentRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_deployment(request) -@pytest.mark.asyncio -async def test_get_deployment_flattened_async(): - client = ApiHubAsyncClient( +def test_get_deployment_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_deployment), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Deployment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = common_fields.Deployment() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Deployment() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_deployment( + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deployments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_deployment(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deployments/*}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_get_deployment_flattened_error_async(): - client = ApiHubAsyncClient( +def test_get_deployment_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_deployment( + client.get_deployment( apihub_service.GetDeploymentRequest(), name="name_value", ) -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.ListDeploymentsRequest, +def test_get_deployment_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.ListDeploymentsRequest, dict, ], ) -def test_list_deployments(request_type, transport: str = "grpc"): +def test_list_deployments_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListDeploymentsResponse( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListDeploymentsResponse( next_page_token="next_page_token_value", ) - response = client.list_deployments(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.ListDeploymentsRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListDeploymentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_deployments(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListDeploymentsPager) assert response.next_page_token == "next_page_token_value" -def test_list_deployments_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_deployments() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListDeploymentsRequest() - - -def test_list_deployments_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = apihub_service.ListDeploymentsRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_deployments(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListDeploymentsRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", - ) - - -def test_list_deployments_use_cached_wrapped_rpc(): +def test_list_deployments_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -10012,6 +8519,7 @@ def test_list_deployments_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.list_deployments ] = mock_rpc + request = {} client.list_deployments(request) @@ -10025,263 +8533,254 @@ def test_list_deployments_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_deployments_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListDeploymentsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_deployments() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListDeploymentsRequest() - - -@pytest.mark.asyncio -async def test_list_deployments_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_list_deployments_rest_required_fields( + request_type=apihub_service.ListDeploymentsRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ApiHubRestTransport - # Ensure method has been cached - assert ( - client._client._transport.list_deployments - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.list_deployments - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.list_deployments(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_deployments._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.list_deployments(request) + jsonified_request["parent"] = "parent_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_deployments._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -@pytest.mark.asyncio -async def test_list_deployments_async( - transport: str = "grpc_asyncio", request_type=apihub_service.ListDeploymentsRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListDeploymentsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListDeploymentsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_deployments(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.ListDeploymentsRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = apihub_service.ListDeploymentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDeploymentsAsyncPager) - assert response.next_page_token == "next_page_token_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_deployments(request) -@pytest.mark.asyncio -async def test_list_deployments_async_from_dict(): - await test_list_deployments_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_list_deployments_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_list_deployments_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.ListDeploymentsRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: - call.return_value = apihub_service.ListDeploymentsResponse() - client.list_deployments(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + unset_fields = transport.list_deployments._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) -@pytest.mark.asyncio -async def test_list_deployments_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_deployments_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_list_deployments" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_list_deployments" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.ListDeploymentsRequest.pb( + apihub_service.ListDeploymentsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.ListDeploymentsRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = apihub_service.ListDeploymentsResponse.to_json( apihub_service.ListDeploymentsResponse() ) - await client.list_deployments(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = apihub_service.ListDeploymentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = apihub_service.ListDeploymentsResponse() - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + client.list_deployments( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + pre.assert_called_once() + post.assert_called_once() -def test_list_deployments_flattened(): + +def test_list_deployments_rest_bad_request( + transport: str = "rest", request_type=apihub_service.ListDeploymentsRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListDeploymentsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.list_deployments( - parent="parent_value", - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_deployments(request) -def test_list_deployments_flattened_error(): +def test_list_deployments_rest_flattened(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_deployments( - apihub_service.ListDeploymentsRequest(), - parent="parent_value", - ) - - -@pytest.mark.asyncio -async def test_list_deployments_flattened_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListDeploymentsResponse() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = apihub_service.ListDeploymentsResponse() + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListDeploymentsResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_deployments( + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListDeploymentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_deployments(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/deployments" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_list_deployments_flattened_error_async(): - client = ApiHubAsyncClient( +def test_list_deployments_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_deployments( + client.list_deployments( apihub_service.ListDeploymentsRequest(), parent="parent_value", ) -def test_list_deployments_pager(transport_name: str = "grpc"): +def test_list_deployments_rest_pager(transport: str = "rest"): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( apihub_service.ListDeploymentsResponse( deployments=[ common_fields.Deployment(), @@ -10306,162 +8805,29 @@ def test_list_deployments_pager(transport_name: str = "grpc"): common_fields.Deployment(), ], ), - RuntimeError, ) + # Two responses for two calls + response = response + response - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Wrap the values into proper Response objs + response = tuple( + apihub_service.ListDeploymentsResponse.to_json(x) for x in response ) - pager = client.list_deployments(request={}, retry=retry, timeout=timeout) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} - assert pager._metadata == expected_metadata - assert 
pager._retry == retry - assert pager._timeout == timeout + pager = client.list_deployments(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, common_fields.Deployment) for i in results) - -def test_list_deployments_pages(transport_name: str = "grpc"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - apihub_service.ListDeploymentsResponse( - deployments=[ - common_fields.Deployment(), - common_fields.Deployment(), - common_fields.Deployment(), - ], - next_page_token="abc", - ), - apihub_service.ListDeploymentsResponse( - deployments=[], - next_page_token="def", - ), - apihub_service.ListDeploymentsResponse( - deployments=[ - common_fields.Deployment(), - ], - next_page_token="ghi", - ), - apihub_service.ListDeploymentsResponse( - deployments=[ - common_fields.Deployment(), - common_fields.Deployment(), - ], - ), - RuntimeError, - ) - pages = list(client.list_deployments(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_deployments_async_pager(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deployments), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.ListDeploymentsResponse( - deployments=[ - common_fields.Deployment(), - common_fields.Deployment(), - common_fields.Deployment(), - ], - next_page_token="abc", - ), - apihub_service.ListDeploymentsResponse( - deployments=[], - next_page_token="def", - ), - apihub_service.ListDeploymentsResponse( - deployments=[ - common_fields.Deployment(), - ], - next_page_token="ghi", - ), - apihub_service.ListDeploymentsResponse( - deployments=[ - common_fields.Deployment(), - common_fields.Deployment(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_deployments( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, common_fields.Deployment) for i in responses) - - -@pytest.mark.asyncio -async def test_list_deployments_async_pages(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deployments), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.ListDeploymentsResponse( - deployments=[ - common_fields.Deployment(), - common_fields.Deployment(), - common_fields.Deployment(), - ], - next_page_token="abc", - ), - apihub_service.ListDeploymentsResponse( - deployments=[], - next_page_token="def", - ), - apihub_service.ListDeploymentsResponse( - deployments=[ - common_fields.Deployment(), - ], - next_page_token="ghi", - ), - apihub_service.ListDeploymentsResponse( - deployments=[ - common_fields.Deployment(), - common_fields.Deployment(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_deployments(request={}) - ).pages: - pages.append(page_) + pages = list(client.list_deployments(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -10473,101 +8839,154 @@ async def test_list_deployments_async_pages(): dict, ], ) -def test_update_deployment(request_type, transport: str = "grpc"): +def test_update_deployment_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = common_fields.Deployment( - name="name_value", - display_name="display_name_value", - description="description_value", - resource_uri="resource_uri_value", - endpoints=["endpoints_value"], - api_versions=["api_versions_value"], - ) - response = client.update_deployment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.UpdateDeploymentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.Deployment) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.resource_uri == "resource_uri_value" - assert response.endpoints == ["endpoints_value"] - assert response.api_versions == ["api_versions_value"] + # send a request that will satisfy transcoding + request_init = { + "deployment": {"name": "projects/sample1/locations/sample2/deployments/sample3"} + } + request_init["deployment"] = { + "name": "projects/sample1/locations/sample2/deployments/sample3", + "display_name": "display_name_value", + "description": "description_value", + "documentation": {"external_uri": "external_uri_value"}, + "deployment_type": { + "enum_values": { + "values": [ + { + "id": "id_value", + "display_name": "display_name_value", + "description": "description_value", + "immutable": True, + } + ] + }, + "string_values": {"values": ["values_value1", "values_value2"]}, + "json_values": {}, + "attribute": "attribute_value", + }, + "resource_uri": "resource_uri_value", + "endpoints": ["endpoints_value1", "endpoints_value2"], + "api_versions": ["api_versions_value1", "api_versions_value2"], + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "slo": {}, + "environment": {}, + "attributes": {}, + } + # The version of a generated dependency at test 
runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.UpdateDeploymentRequest.meta.fields["deployment"] -def test_update_deployment_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deployment), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_deployment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateDeploymentRequest() + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields -def test_update_deployment_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.UpdateDeploymentRequest() + subfields_not_in_runtime = [] - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deployment), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["deployment"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = 
subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["deployment"][field])): + del request_init["deployment"][field][i][subfield] + else: + del request_init["deployment"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Deployment( + name="name_value", + display_name="display_name_value", + description="description_value", + resource_uri="resource_uri_value", + endpoints=["endpoints_value"], + api_versions=["api_versions_value"], ) - client.update_deployment(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateDeploymentRequest() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_deployment(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, common_fields.Deployment) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.resource_uri == "resource_uri_value" + assert response.endpoints == ["endpoints_value"] + assert response.api_versions == ["api_versions_value"] -def test_update_deployment_use_cached_wrapped_rpc(): +def test_update_deployment_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -10585,6 +9004,7 @@ def test_update_deployment_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.update_deployment ] = mock_rpc + request = {} client.update_deployment(request) @@ -10598,290 +9018,245 @@ def test_update_deployment_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_deployment_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Deployment( - name="name_value", - display_name="display_name_value", - description="description_value", - resource_uri="resource_uri_value", - endpoints=["endpoints_value"], - api_versions=["api_versions_value"], - ) - ) - response = await client.update_deployment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateDeploymentRequest() - - -@pytest.mark.asyncio -async def test_update_deployment_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_update_deployment_rest_required_fields( + request_type=apihub_service.UpdateDeploymentRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.update_deployment - in client._client._transport._wrapped_methods - ) + transport_class = transports.ApiHubRestTransport - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.update_deployment - ] = mock_rpc + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - request = {} - await client.update_deployment(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - await client.update_deployment(request) + # verify required fields with default values are now present - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_deployment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone -@pytest.mark.asyncio -async def test_update_deployment_async( - transport: str = "grpc_asyncio", request_type=apihub_service.UpdateDeploymentRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Deployment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Deployment( - name="name_value", - display_name="display_name_value", - description="description_value", - resource_uri="resource_uri_value", - endpoints=["endpoints_value"], - api_versions=["api_versions_value"], - ) - ) - response = await client.update_deployment(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.UpdateDeploymentRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Deployment) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.resource_uri == "resource_uri_value" - assert response.endpoints == ["endpoints_value"] - assert response.api_versions == ["api_versions_value"] + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_deployment(request) -@pytest.mark.asyncio -async def test_update_deployment_async_from_dict(): - await test_update_deployment_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_update_deployment_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_update_deployment_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.UpdateDeploymentRequest() - - request.deployment.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deployment), "__call__" - ) as call: - call.return_value = common_fields.Deployment() - client.update_deployment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "deployment.name=name_value", - ) in kw["metadata"] + unset_fields = transport.update_deployment._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "deployment", + "updateMask", + ) + ) + ) -@pytest.mark.asyncio -async def test_update_deployment_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_deployment_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.UpdateDeploymentRequest() - - request.deployment.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. + client = ApiHubClient(transport=transport) with mock.patch.object( - type(client.transport.update_deployment), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Deployment() + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_update_deployment" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_update_deployment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.UpdateDeploymentRequest.pb( + apihub_service.UpdateDeploymentRequest() ) - await client.update_deployment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "deployment.name=name_value", - ) in kw["metadata"] + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Deployment.to_json( + common_fields.Deployment() + ) -def test_update_deployment_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.UpdateDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Deployment() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Deployment() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.update_deployment( - deployment=common_fields.Deployment(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].deployment - mock_val = common_fields.Deployment(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_update_deployment_flattened_error(): +def test_update_deployment_rest_bad_request( + transport: str = "rest", request_type=apihub_service.UpdateDeploymentRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_deployment( - apihub_service.UpdateDeploymentRequest(), - deployment=common_fields.Deployment(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) + # send a request that will satisfy transcoding + request_init = { + "deployment": {"name": "projects/sample1/locations/sample2/deployments/sample3"} + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_deployment(request) -@pytest.mark.asyncio -async def test_update_deployment_flattened_async(): - client = ApiHubAsyncClient( +def test_update_deployment_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Deployment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Deployment() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Deployment() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_deployment( + # get arguments that satisfy an http rule for this method + sample_request = { + "deployment": { + "name": "projects/sample1/locations/sample2/deployments/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( deployment=common_fields.Deployment(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_deployment(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].deployment - mock_val = common_fields.Deployment(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_update_deployment_flattened_error_async(): - client = ApiHubAsyncClient( + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{deployment.name=projects/*/locations/*/deployments/*}" + % client.transport._host, + args[1], + ) + + +def test_update_deployment_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_deployment( + client.update_deployment( apihub_service.UpdateDeploymentRequest(), deployment=common_fields.Deployment(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) +def test_update_deployment_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -10889,92 +9264,41 @@ async def test_update_deployment_flattened_error_async(): dict, ], ) -def test_delete_deployment(request_type, transport: str = "grpc"): +def test_delete_deployment_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_deployment(request) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.DeleteDeploymentRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_deployment(request) # Establish that the response is the type that we expect. assert response is None -def test_delete_deployment_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deployment), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.delete_deployment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteDeploymentRequest() - - -def test_delete_deployment_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.DeleteDeploymentRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deployment), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.delete_deployment(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteDeploymentRequest( - name="name_value", - ) - - -def test_delete_deployment_use_cached_wrapped_rpc(): +def test_delete_deployment_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -10992,6 +9316,7 @@ def test_delete_deployment_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.delete_deployment ] = mock_rpc + request = {} client.delete_deployment(request) @@ -11005,252 +9330,220 @@ def test_delete_deployment_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_deployment_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_deployment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteDeploymentRequest() - - -@pytest.mark.asyncio -async def test_delete_deployment_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_delete_deployment_rest_required_fields( + request_type=apihub_service.DeleteDeploymentRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.delete_deployment - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.delete_deployment - ] = mock_rpc - - request = {} - await client.delete_deployment(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.delete_deployment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - + transport_class = transports.ApiHubRestTransport -@pytest.mark.asyncio -async def test_delete_deployment_async( - transport: str = "grpc_asyncio", request_type=apihub_service.DeleteDeploymentRequest -): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_deployment(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.DeleteDeploymentRequest() - assert args[0] == request + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the response is the type that we expect. 
- assert response is None + # verify required fields with default values are now present + jsonified_request["name"] = "name_value" -@pytest.mark.asyncio -async def test_delete_deployment_async_from_dict(): - await test_delete_deployment_async(request_type=dict) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -def test_delete_deployment_field_headers(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request = request_type(**request_init) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.DeleteDeploymentRequest() + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result - request.name = "name_value" + response_value = Response() + response_value.status_code = 200 + json_return_value = "" - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_deployment), "__call__" - ) as call: - call.return_value = None - client.delete_deployment(request) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.delete_deployment(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -@pytest.mark.asyncio -async def test_delete_deployment_field_headers_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_delete_deployment_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.DeleteDeploymentRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deployment), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_deployment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.delete_deployment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_delete_deployment_flattened(): - client = ApiHubClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_deployment_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) - - # Mock the actual call within the gRPC stub, and fake the request. + client = ApiHubClient(transport=transport) with mock.patch.object( - type(client.transport.delete_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_deployment( - name="name_value", + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_delete_deployment" + ) as pre: + pre.assert_not_called() + pb_message = apihub_service.DeleteDeploymentRequest.pb( + apihub_service.DeleteDeploymentRequest() ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = apihub_service.DeleteDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_deployment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() -def test_delete_deployment_flattened_error(): +def test_delete_deployment_rest_bad_request( + transport: str = "rest", request_type=apihub_service.DeleteDeploymentRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_deployment( - apihub_service.DeleteDeploymentRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_deployment(request) -@pytest.mark.asyncio -async def test_delete_deployment_flattened_async(): - client = ApiHubAsyncClient( +def test_delete_deployment_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_deployment( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deployments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_deployment(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deployments/*}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_delete_deployment_flattened_error_async(): - client = ApiHubAsyncClient( +def test_delete_deployment_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.delete_deployment( + client.delete_deployment( apihub_service.DeleteDeploymentRequest(), name="name_value", ) +def test_delete_deployment_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -11258,20 +9551,107 @@ async def test_delete_deployment_flattened_error_async(): dict, ], ) -def test_create_attribute(request_type, transport: str = "grpc"): +def test_create_attribute_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["attribute"] = { + "name": "name_value", + "display_name": "display_name_value", + "description": "description_value", + "definition_type": 1, + "scope": 1, + "data_type": 1, + "allowed_values": [ + { + "id": "id_value", + "display_name": "display_name_value", + "description": "description_value", + "immutable": True, + } + ], + "cardinality": 1172, + "mandatory": True, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.CreateAttributeRequest.meta.fields["attribute"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. 
+ # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["attribute"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["attribute"][field])): + del 
request_init["attribute"][field][i][subfield] + else: + del request_init["attribute"][field][subfield] + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Attribute( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Attribute( name="name_value", display_name="display_name_value", description="description_value", @@ -11281,13 +9661,17 @@ def test_create_attribute(request_type, transport: str = "grpc"): cardinality=1172, mandatory=True, ) - response = client.create_attribute(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.CreateAttributeRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Attribute.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_attribute(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.Attribute) @@ -11304,62 +9688,13 @@ def test_create_attribute(request_type, transport: str = "grpc"): assert response.mandatory is True -def test_create_attribute_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_attribute), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_attribute() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateAttributeRequest() - - -def test_create_attribute_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.CreateAttributeRequest( - parent="parent_value", - attribute_id="attribute_id_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_attribute), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.create_attribute(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateAttributeRequest( - parent="parent_value", - attribute_id="attribute_id_value", - ) - - -def test_create_attribute_use_cached_wrapped_rpc(): +def test_create_attribute_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -11377,6 +9712,7 @@ def test_create_attribute_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.create_attribute ] = mock_rpc + request = {} client.create_attribute(request) @@ -11390,290 +9726,232 @@ def test_create_attribute_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_attribute_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_attribute), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Attribute( - name="name_value", - display_name="display_name_value", - description="description_value", - definition_type=common_fields.Attribute.DefinitionType.SYSTEM_DEFINED, - scope=common_fields.Attribute.Scope.API, - data_type=common_fields.Attribute.DataType.ENUM, - cardinality=1172, - mandatory=True, - ) - ) - response = await client.create_attribute() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateAttributeRequest() +def test_create_attribute_rest_required_fields( + request_type=apihub_service.CreateAttributeRequest, +): + transport_class = transports.ApiHubRestTransport + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) -@pytest.mark.asyncio -async def test_create_attribute_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # verify fields with default values are dropped - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_attribute._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Ensure method has been cached - assert ( - client._client._transport.create_attribute - in client._client._transport._wrapped_methods - ) + # verify required fields with default values are now present - # Replace cached wrapped function with mock - 
mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.create_attribute - ] = mock_rpc - - request = {} - await client.create_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.create_attribute(request) + jsonified_request["parent"] = "parent_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_attribute._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("attribute_id",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -@pytest.mark.asyncio -async def test_create_attribute_async( - transport: str = "grpc_asyncio", request_type=apihub_service.CreateAttributeRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Attribute() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Attribute( - name="name_value", - display_name="display_name_value", - description="description_value", - definition_type=common_fields.Attribute.DefinitionType.SYSTEM_DEFINED, - scope=common_fields.Attribute.Scope.API, - data_type=common_fields.Attribute.DataType.ENUM, - cardinality=1172, - mandatory=True, - ) - ) - response = await client.create_attribute(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.CreateAttributeRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Attribute.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Attribute) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert ( - response.definition_type - == common_fields.Attribute.DefinitionType.SYSTEM_DEFINED - ) - assert response.scope == common_fields.Attribute.Scope.API - assert response.data_type == common_fields.Attribute.DataType.ENUM - assert response.cardinality == 1172 - assert response.mandatory is True + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_attribute(request) -@pytest.mark.asyncio -async def test_create_attribute_async_from_dict(): - await test_create_attribute_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_create_attribute_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_create_attribute_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.CreateAttributeRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_attribute), "__call__") as call: - call.return_value = common_fields.Attribute() - client.create_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + unset_fields = transport.create_attribute._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("attributeId",)) + & set( + ( + "parent", + "attribute", + ) + ) + ) -@pytest.mark.asyncio -async def test_create_attribute_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_attribute_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_create_attribute" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_create_attribute" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.CreateAttributeRequest.pb( + apihub_service.CreateAttributeRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.CreateAttributeRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.create_attribute), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Attribute.to_json( common_fields.Attribute() ) - await client.create_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -def test_create_attribute_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.CreateAttributeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Attribute() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Attribute() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.create_attribute( - parent="parent_value", - attribute=common_fields.Attribute(name="name_value"), - attribute_id="attribute_id_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].attribute - mock_val = common_fields.Attribute(name="name_value") - assert arg == mock_val - arg = args[0].attribute_id - mock_val = "attribute_id_value" - assert arg == mock_val - - -def test_create_attribute_flattened_error(): + pre.assert_called_once() + post.assert_called_once() + + +def test_create_attribute_rest_bad_request( + transport: str = "rest", request_type=apihub_service.CreateAttributeRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_attribute( - apihub_service.CreateAttributeRequest(), - parent="parent_value", - attribute=common_fields.Attribute(name="name_value"), - attribute_id="attribute_id_value", - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_attribute(request) -@pytest.mark.asyncio -async def test_create_attribute_flattened_async(): - client = ApiHubAsyncClient( +def test_create_attribute_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.create_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Attribute() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Attribute() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Attribute() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_attribute( + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", attribute=common_fields.Attribute(name="name_value"), attribute_id="attribute_id_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Attribute.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_attribute(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].attribute - mock_val = common_fields.Attribute(name="name_value") - assert arg == mock_val - arg = args[0].attribute_id - mock_val = "attribute_id_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_create_attribute_flattened_error_async(): - client = ApiHubAsyncClient( + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/attributes" % client.transport._host, + args[1], + ) + + +def test_create_attribute_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_attribute( + client.create_attribute( apihub_service.CreateAttributeRequest(), parent="parent_value", attribute=common_fields.Attribute(name="name_value"), @@ -11681,6 +9959,12 @@ async def test_create_attribute_flattened_error_async(): ) +def test_create_attribute_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -11688,20 +9972,20 @@ async def test_create_attribute_flattened_error_async(): dict, ], ) -def test_get_attribute(request_type, transport: str = "grpc"): +def test_get_attribute_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/attributes/sample3"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Attribute( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Attribute( name="name_value", display_name="display_name_value", description="description_value", @@ -11711,13 +9995,17 @@ def test_get_attribute(request_type, transport: str = "grpc"): cardinality=1172, mandatory=True, ) - response = client.get_attribute(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.GetAttributeRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Attribute.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_attribute(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.Attribute) @@ -11734,60 +10022,13 @@ def test_get_attribute(request_type, transport: str = "grpc"): assert response.mandatory is True -def test_get_attribute_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_attribute), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_attribute() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetAttributeRequest() - - -def test_get_attribute_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.GetAttributeRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_attribute), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.get_attribute(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetAttributeRequest( - name="name_value", - ) - - -def test_get_attribute_use_cached_wrapped_rpc(): +def test_get_attribute_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -11803,6 +10044,7 @@ def test_get_attribute_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.get_attribute] = mock_rpc + request = {} client.get_attribute(request) @@ -11816,277 +10058,232 @@ def test_get_attribute_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_attribute_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_attribute), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Attribute( - name="name_value", - display_name="display_name_value", - description="description_value", - definition_type=common_fields.Attribute.DefinitionType.SYSTEM_DEFINED, - scope=common_fields.Attribute.Scope.API, - data_type=common_fields.Attribute.DataType.ENUM, - cardinality=1172, - mandatory=True, - ) - ) - response = await client.get_attribute() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetAttributeRequest() - - -@pytest.mark.asyncio -async def test_get_attribute_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_get_attribute_rest_required_fields( + request_type=apihub_service.GetAttributeRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ApiHubRestTransport - # Ensure method has been cached - assert ( - client._client._transport.get_attribute - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_attribute - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.get_attribute(request) + unset_fields = transport_class( + 
credentials=ga_credentials.AnonymousCredentials() + ).get_attribute._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.get_attribute(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_attribute._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_get_attribute_async( - transport: str = "grpc_asyncio", request_type=apihub_service.GetAttributeRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Attribute() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Attribute( - name="name_value", - display_name="display_name_value", - description="description_value", - definition_type=common_fields.Attribute.DefinitionType.SYSTEM_DEFINED, - scope=common_fields.Attribute.Scope.API, - data_type=common_fields.Attribute.DataType.ENUM, - cardinality=1172, - mandatory=True, - ) - ) - response = await client.get_attribute(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.GetAttributeRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Attribute.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Attribute) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert ( - response.definition_type - == common_fields.Attribute.DefinitionType.SYSTEM_DEFINED - ) - assert response.scope == common_fields.Attribute.Scope.API - assert response.data_type == common_fields.Attribute.DataType.ENUM - assert response.cardinality == 1172 - assert response.mandatory is True + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_attribute(request) -@pytest.mark.asyncio -async def test_get_attribute_async_from_dict(): - await test_get_attribute_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_get_attribute_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_attribute_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetAttributeRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_attribute), "__call__") as call: - call.return_value = common_fields.Attribute() - client.get_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.get_attribute._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.asyncio -async def test_get_attribute_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_attribute_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_get_attribute" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_get_attribute" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.GetAttributeRequest.pb( + apihub_service.GetAttributeRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetAttributeRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_attribute), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Attribute.to_json( common_fields.Attribute() ) - await client.get_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -def test_get_attribute_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.GetAttributeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Attribute() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Attribute() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.get_attribute( - name="name_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_get_attribute_flattened_error(): +def test_get_attribute_rest_bad_request( + transport: str = "rest", request_type=apihub_service.GetAttributeRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_attribute( - apihub_service.GetAttributeRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/attributes/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_attribute(request) -@pytest.mark.asyncio -async def test_get_attribute_flattened_async(): - client = ApiHubAsyncClient( +def test_get_attribute_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Attribute() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = common_fields.Attribute() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Attribute() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_attribute( + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/attributes/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Attribute.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_attribute(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/attributes/*}" % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_get_attribute_flattened_error_async(): - client = ApiHubAsyncClient( +def test_get_attribute_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_attribute( + client.get_attribute( apihub_service.GetAttributeRequest(), name="name_value", ) +def test_get_attribute_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -12094,20 +10291,109 @@ async def test_get_attribute_flattened_error_async(): dict, ], ) -def test_update_attribute(request_type, transport: str = "grpc"): +def test_update_attribute_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "attribute": {"name": "projects/sample1/locations/sample2/attributes/sample3"} + } + request_init["attribute"] = { + "name": "projects/sample1/locations/sample2/attributes/sample3", + "display_name": "display_name_value", + "description": "description_value", + "definition_type": 1, + "scope": 1, + "data_type": 1, + "allowed_values": [ + { + "id": "id_value", + "display_name": "display_name_value", + "description": "description_value", + "immutable": True, + } + ], + "cardinality": 1172, + "mandatory": True, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.UpdateAttributeRequest.meta.fields["attribute"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["attribute"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency 
+ # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["attribute"][field])): + del request_init["attribute"][field][i][subfield] + else: + del request_init["attribute"][field][subfield] + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Attribute( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Attribute( name="name_value", display_name="display_name_value", description="description_value", @@ -12117,13 +10403,17 @@ def test_update_attribute(request_type, transport: str = "grpc"): cardinality=1172, mandatory=True, ) - response = client.update_attribute(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.UpdateAttributeRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Attribute.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_attribute(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.Attribute) @@ -12140,56 +10430,13 @@ def test_update_attribute(request_type, transport: str = "grpc"): assert response.mandatory is True -def test_update_attribute_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_attribute), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_attribute() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateAttributeRequest() - - -def test_update_attribute_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.UpdateAttributeRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_attribute), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_attribute(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateAttributeRequest() - - -def test_update_attribute_use_cached_wrapped_rpc(): +def test_update_attribute_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -12207,6 +10454,7 @@ def test_update_attribute_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.update_attribute ] = mock_rpc + request = {} client.update_attribute(request) @@ -12220,287 +10468,245 @@ def test_update_attribute_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_attribute_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Attribute( - name="name_value", - display_name="display_name_value", - description="description_value", - definition_type=common_fields.Attribute.DefinitionType.SYSTEM_DEFINED, - scope=common_fields.Attribute.Scope.API, - data_type=common_fields.Attribute.DataType.ENUM, - cardinality=1172, - mandatory=True, - ) - ) - response = await client.update_attribute() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateAttributeRequest() - - -@pytest.mark.asyncio -async def test_update_attribute_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_update_attribute_rest_required_fields( + request_type=apihub_service.UpdateAttributeRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.update_attribute - in client._client._transport._wrapped_methods - ) + transport_class = transports.ApiHubRestTransport - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.update_attribute - ] = mock_rpc + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - request = {} - 
await client.update_attribute(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_attribute._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - await client.update_attribute(request) + # verify required fields with default values are now present - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_attribute._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone -@pytest.mark.asyncio -async def test_update_attribute_async( - transport: str = "grpc_asyncio", request_type=apihub_service.UpdateAttributeRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Attribute() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Attribute( - name="name_value", - display_name="display_name_value", - description="description_value", - definition_type=common_fields.Attribute.DefinitionType.SYSTEM_DEFINED, - scope=common_fields.Attribute.Scope.API, - data_type=common_fields.Attribute.DataType.ENUM, - cardinality=1172, - mandatory=True, - ) - ) - response = await client.update_attribute(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.UpdateAttributeRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Attribute.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Attribute) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert ( - response.definition_type - == common_fields.Attribute.DefinitionType.SYSTEM_DEFINED - ) - assert response.scope == common_fields.Attribute.Scope.API - assert response.data_type == common_fields.Attribute.DataType.ENUM - assert response.cardinality == 1172 - assert response.mandatory is True + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_attribute(request) -@pytest.mark.asyncio -async def test_update_attribute_async_from_dict(): - await test_update_attribute_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_update_attribute_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_update_attribute_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.UpdateAttributeRequest() - - request.attribute.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_attribute), "__call__") as call: - call.return_value = common_fields.Attribute() - client.update_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "attribute.name=name_value", - ) in kw["metadata"] + unset_fields = transport.update_attribute._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "attribute", + "updateMask", + ) + ) + ) -@pytest.mark.asyncio -async def test_update_attribute_field_headers_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_attribute_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_update_attribute" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_update_attribute" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.UpdateAttributeRequest.pb( + apihub_service.UpdateAttributeRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.UpdateAttributeRequest() - - request.attribute.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_attribute), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Attribute.to_json( common_fields.Attribute() ) - await client.update_attribute(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "attribute.name=name_value", - ) in kw["metadata"] - - -def test_update_attribute_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.UpdateAttributeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Attribute() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Attribute() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.update_attribute( - attribute=common_fields.Attribute(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].attribute - mock_val = common_fields.Attribute(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_update_attribute_flattened_error(): +def test_update_attribute_rest_bad_request( + transport: str = "rest", request_type=apihub_service.UpdateAttributeRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_attribute( - apihub_service.UpdateAttributeRequest(), - attribute=common_fields.Attribute(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) + # send a request that will satisfy transcoding + request_init = { + "attribute": {"name": "projects/sample1/locations/sample2/attributes/sample3"} + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_attribute(request) -@pytest.mark.asyncio -async def test_update_attribute_flattened_async(): - client = ApiHubAsyncClient( +def test_update_attribute_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_attribute), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = common_fields.Attribute() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Attribute() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Attribute() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_attribute( + # get arguments that satisfy an http rule for this method + sample_request = { + "attribute": { + "name": "projects/sample1/locations/sample2/attributes/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( attribute=common_fields.Attribute(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Attribute.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_attribute(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].attribute - mock_val = common_fields.Attribute(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_update_attribute_flattened_error_async(): - client = ApiHubAsyncClient( + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{attribute.name=projects/*/locations/*/attributes/*}" + % client.transport._host, + args[1], + ) + + +def test_update_attribute_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_attribute( + client.update_attribute( apihub_service.UpdateAttributeRequest(), attribute=common_fields.Attribute(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) +def test_update_attribute_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -12508,86 +10714,41 @@ async def test_update_attribute_flattened_error_async(): dict, ], ) -def test_delete_attribute(request_type, transport: str = "grpc"): +def test_delete_attribute_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/attributes/sample3"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_attribute(request) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.DeleteAttributeRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_attribute(request) # Establish that the response is the type that we expect. assert response is None -def test_delete_attribute_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_attribute), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.delete_attribute() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteAttributeRequest() - - -def test_delete_attribute_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.DeleteAttributeRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_attribute), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_attribute(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteAttributeRequest( - name="name_value", - ) - - -def test_delete_attribute_use_cached_wrapped_rpc(): +def test_delete_attribute_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -12605,6 +10766,7 @@ def test_delete_attribute_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.delete_attribute ] = mock_rpc + request = {} client.delete_attribute(request) @@ -12618,240 +10780,219 @@ def test_delete_attribute_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio 
-async def test_delete_attribute_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_attribute() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteAttributeRequest() - - -@pytest.mark.asyncio -async def test_delete_attribute_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_delete_attribute_rest_required_fields( + request_type=apihub_service.DeleteAttributeRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.delete_attribute - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.delete_attribute - ] = mock_rpc - - request = {} - await client.delete_attribute(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.delete_attribute(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - + transport_class = transports.ApiHubRestTransport -@pytest.mark.asyncio -async def test_delete_attribute_async( - transport: str = "grpc_asyncio", request_type=apihub_service.DeleteAttributeRequest -): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_attribute(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.DeleteAttributeRequest() - assert args[0] == request + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_attribute._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the response is the type that we expect. 
- assert response is None + # verify required fields with default values are now present + jsonified_request["name"] = "name_value" -@pytest.mark.asyncio -async def test_delete_attribute_async_from_dict(): - await test_delete_attribute_async(request_type=dict) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_attribute._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -def test_delete_attribute_field_headers(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request = request_type(**request_init) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.DeleteAttributeRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_attribute), "__call__") as call: - call.return_value = None - client.delete_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_delete_attribute_field_headers_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.DeleteAttributeRequest() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - request.name = "name_value" + response = client.delete_attribute(request) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_attribute), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_attribute(request) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] +def test_delete_attribute_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_attribute._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_delete_attribute_flattened(): - client = ApiHubClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_attribute_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_delete_attribute" + ) as pre: + pre.assert_not_called() + pb_message = apihub_service.DeleteAttributeRequest.pb( + apihub_service.DeleteAttributeRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = apihub_service.DeleteAttributeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_attribute), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
client.delete_attribute( - name="name_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + pre.assert_called_once() -def test_delete_attribute_flattened_error(): +def test_delete_attribute_rest_bad_request( + transport: str = "rest", request_type=apihub_service.DeleteAttributeRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_attribute( - apihub_service.DeleteAttributeRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/attributes/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_attribute(request) -@pytest.mark.asyncio -async def test_delete_attribute_flattened_async(): - client = ApiHubAsyncClient( +def test_delete_attribute_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_attribute), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/attributes/sample3" + } - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_attribute( + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_attribute(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/attributes/*}" % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_delete_attribute_flattened_error_async(): - client = ApiHubAsyncClient( +def test_delete_attribute_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.delete_attribute( + client.delete_attribute( apihub_service.DeleteAttributeRequest(), name="name_value", ) +def test_delete_attribute_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -12859,93 +11000,46 @@ async def test_delete_attribute_flattened_error_async(): dict, ], ) -def test_list_attributes(request_type, transport: str = "grpc"): +def test_list_attributes_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_attributes), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListAttributesResponse( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListAttributesResponse( next_page_token="next_page_token_value", ) - response = client.list_attributes(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.ListAttributesRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListAttributesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_attributes(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListAttributesPager) assert response.next_page_token == "next_page_token_value" -def test_list_attributes_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_attributes), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_attributes() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListAttributesRequest() - - -def test_list_attributes_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = apihub_service.ListAttributesRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_attributes), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_attributes(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListAttributesRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", - ) - - -def test_list_attributes_use_cached_wrapped_rpc(): +def test_list_attributes_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -12961,6 +11055,7 @@ def test_list_attributes_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.list_attributes] = mock_rpc + request = {} client.list_attributes(request) @@ -12974,263 +11069,253 @@ def test_list_attributes_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_attributes_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_attributes), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListAttributesResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_attributes() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListAttributesRequest() - - -@pytest.mark.asyncio -async def test_list_attributes_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_list_attributes_rest_required_fields( + request_type=apihub_service.ListAttributesRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ApiHubRestTransport - # Ensure method has been cached - assert ( - client._client._transport.list_attributes - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.list_attributes - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.list_attributes(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + 
).list_attributes._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.list_attributes(request) + jsonified_request["parent"] = "parent_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_attributes._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -@pytest.mark.asyncio -async def test_list_attributes_async( - transport: str = "grpc_asyncio", request_type=apihub_service.ListAttributesRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListAttributesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_attributes), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListAttributesResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_attributes(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.ListAttributesRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = apihub_service.ListAttributesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListAttributesAsyncPager) - assert response.next_page_token == "next_page_token_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_attributes(request) -@pytest.mark.asyncio -async def test_list_attributes_async_from_dict(): - await test_list_attributes_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_list_attributes_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_list_attributes_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.ListAttributesRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_attributes), "__call__") as call: - call.return_value = apihub_service.ListAttributesResponse() - client.list_attributes(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + unset_fields = transport.list_attributes._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) -@pytest.mark.asyncio -async def test_list_attributes_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_attributes_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_list_attributes" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_list_attributes" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.ListAttributesRequest.pb( + apihub_service.ListAttributesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.ListAttributesRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_attributes), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = apihub_service.ListAttributesResponse.to_json( apihub_service.ListAttributesResponse() ) - await client.list_attributes(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - -def test_list_attributes_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.ListAttributesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = apihub_service.ListAttributesResponse() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_attributes), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListAttributesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.list_attributes( - parent="parent_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_list_attributes_flattened_error(): +def test_list_attributes_rest_bad_request( + transport: str = "rest", request_type=apihub_service.ListAttributesRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_attributes( - apihub_service.ListAttributesRequest(), - parent="parent_value", - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_attributes(request) -@pytest.mark.asyncio -async def test_list_attributes_flattened_async(): - client = ApiHubAsyncClient( +def test_list_attributes_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_attributes), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListAttributesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = apihub_service.ListAttributesResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListAttributesResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_attributes( + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListAttributesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_attributes(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/attributes" % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_list_attributes_flattened_error_async(): - client = ApiHubAsyncClient( +def test_list_attributes_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_attributes( + client.list_attributes( apihub_service.ListAttributesRequest(), parent="parent_value", ) -def test_list_attributes_pager(transport_name: str = "grpc"): +def test_list_attributes_rest_pager(transport: str = "rest"): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_attributes), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( apihub_service.ListAttributesResponse( attributes=[ common_fields.Attribute(), @@ -13255,262 +11340,80 @@ def test_list_attributes_pager(transport_name: str = "grpc"): common_fields.Attribute(), ], ), - RuntimeError, ) + # Two responses for two calls + response = response + response - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Wrap the values into proper Response objs + response = tuple( + apihub_service.ListAttributesResponse.to_json(x) for x in response ) - pager = client.list_attributes(request={}, retry=retry, timeout=timeout) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + sample_request = 
{"parent": "projects/sample1/locations/sample2"} + + pager = client.list_attributes(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, common_fields.Attribute) for i in results) + pages = list(client.list_attributes(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + -def test_list_attributes_pages(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + apihub_service.SearchResourcesRequest, + dict, + ], +) +def test_search_resources_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_attributes), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - apihub_service.ListAttributesResponse( - attributes=[ - common_fields.Attribute(), - common_fields.Attribute(), - common_fields.Attribute(), - ], - next_page_token="abc", - ), - apihub_service.ListAttributesResponse( - attributes=[], - next_page_token="def", - ), - apihub_service.ListAttributesResponse( - attributes=[ - common_fields.Attribute(), - ], - next_page_token="ghi", - ), - apihub_service.ListAttributesResponse( - attributes=[ - common_fields.Attribute(), - common_fields.Attribute(), - ], - ), - RuntimeError, + # send a request that will satisfy transcoding + request_init = {"location": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = apihub_service.SearchResourcesResponse( + next_page_token="next_page_token_value", ) - pages = list(client.list_attributes(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.SearchResourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) -@pytest.mark.asyncio -async def test_list_attributes_async_pager(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_attributes), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - apihub_service.ListAttributesResponse( - attributes=[ - common_fields.Attribute(), - common_fields.Attribute(), - common_fields.Attribute(), - ], - next_page_token="abc", - ), - apihub_service.ListAttributesResponse( - attributes=[], - next_page_token="def", - ), - apihub_service.ListAttributesResponse( - attributes=[ - common_fields.Attribute(), - ], - next_page_token="ghi", - ), - apihub_service.ListAttributesResponse( - attributes=[ - common_fields.Attribute(), - common_fields.Attribute(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_attributes( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, common_fields.Attribute) for i in responses) - - -@pytest.mark.asyncio -async def test_list_attributes_async_pages(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual 
call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_attributes), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - apihub_service.ListAttributesResponse( - attributes=[ - common_fields.Attribute(), - common_fields.Attribute(), - common_fields.Attribute(), - ], - next_page_token="abc", - ), - apihub_service.ListAttributesResponse( - attributes=[], - next_page_token="def", - ), - apihub_service.ListAttributesResponse( - attributes=[ - common_fields.Attribute(), - ], - next_page_token="ghi", - ), - apihub_service.ListAttributesResponse( - attributes=[ - common_fields.Attribute(), - common_fields.Attribute(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_attributes(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.SearchResourcesRequest, - dict, - ], -) -def test_search_resources(request_type, transport: str = "grpc"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.search_resources), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = apihub_service.SearchResourcesResponse( - next_page_token="next_page_token_value", - ) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value response = client.search_resources(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.SearchResourcesRequest() - assert args[0] == request - # Establish that the response is the type that we expect. assert isinstance(response, pagers.SearchResourcesPager) assert response.next_page_token == "next_page_token_value" -def test_search_resources_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.search_resources), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.search_resources() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.SearchResourcesRequest() - - -def test_search_resources_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = apihub_service.SearchResourcesRequest( - location="location_value", - query="query_value", - filter="filter_value", - page_token="page_token_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.search_resources), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.search_resources(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.SearchResourcesRequest( - location="location_value", - query="query_value", - filter="filter_value", - page_token="page_token_value", - ) - - -def test_search_resources_use_cached_wrapped_rpc(): +def test_search_resources_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -13528,6 +11431,7 @@ def test_search_resources_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.search_resources ] = mock_rpc + request = {} client.search_resources(request) @@ -13541,273 +11445,252 @@ def test_search_resources_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_search_resources_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.search_resources), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.SearchResourcesResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.search_resources() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.SearchResourcesRequest() - - -@pytest.mark.asyncio -async def test_search_resources_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_search_resources_rest_required_fields( + request_type=apihub_service.SearchResourcesRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.search_resources - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.search_resources - ] = mock_rpc - - request = {} - await client.search_resources(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.search_resources(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - + transport_class = transports.ApiHubRestTransport -@pytest.mark.asyncio -async def test_search_resources_async( - transport: str = "grpc_asyncio", request_type=apihub_service.SearchResourcesRequest -): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + request_init = {} + request_init["location"] = "" + request_init["query"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.search_resources), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.SearchResourcesResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.search_resources(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.SearchResourcesRequest() - assert args[0] == request + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).search_resources._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.SearchResourcesAsyncPager) - assert response.next_page_token == "next_page_token_value" + # verify required fields with default values are now present + jsonified_request["location"] = "location_value" + jsonified_request["query"] = "query_value" -@pytest.mark.asyncio -async def test_search_resources_async_from_dict(): - await test_search_resources_async(request_type=dict) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).search_resources._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "location" in jsonified_request + assert jsonified_request["location"] == "location_value" + assert "query" in jsonified_request + assert jsonified_request["query"] == "query_value" -def test_search_resources_field_headers(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request = request_type(**request_init) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.SearchResourcesRequest() - - request.location = "location_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.search_resources), "__call__") as call: - call.return_value = apihub_service.SearchResourcesResponse() - client.search_resources(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "location=location_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_search_resources_field_headers_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Designate an appropriate value for the returned response. + return_value = apihub_service.SearchResourcesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.SearchResourcesRequest() + response_value = Response() + response_value.status_code = 200 - request.location = "location_value" + # Convert return value to protobuf type + return_value = apihub_service.SearchResourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.search_resources), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.SearchResourcesResponse() - ) - await client.search_resources(request) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.search_resources(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "location=location_value", - ) in kw["metadata"] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_search_resources_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_search_resources_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.search_resources), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.SearchResourcesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.search_resources( - location="location_value", - query="query_value", + unset_fields = transport.search_resources._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "location", + "query", + ) ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].location - mock_val = "location_value" - assert arg == mock_val - arg = args[0].query - mock_val = "query_value" - assert arg == mock_val + ) -def test_search_resources_flattened_error(): - client = ApiHubClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_search_resources_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_search_resources" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_search_resources" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.SearchResourcesRequest.pb( + apihub_service.SearchResourcesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = apihub_service.SearchResourcesResponse.to_json( + apihub_service.SearchResourcesResponse() + ) + + request = apihub_service.SearchResourcesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = apihub_service.SearchResourcesResponse() + client.search_resources( - apihub_service.SearchResourcesRequest(), - location="location_value", - query="query_value", + request, + metadata=[ 
+ ("key", "val"), + ("cephalopod", "squid"), + ], ) + pre.assert_called_once() + post.assert_called_once() + -@pytest.mark.asyncio -async def test_search_resources_flattened_async(): - client = ApiHubAsyncClient( +def test_search_resources_rest_bad_request( + transport: str = "rest", request_type=apihub_service.SearchResourcesRequest +): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.search_resources), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.SearchResourcesResponse() + # send a request that will satisfy transcoding + request_init = {"location": "projects/sample1/locations/sample2"} + request = request_type(**request_init) - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.SearchResourcesResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.search_resources( + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.search_resources(request) + + +def test_search_resources_rest_flattened(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = apihub_service.SearchResourcesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"location": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( location="location_value", query="query_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.SearchResourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.search_resources(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].location - mock_val = "location_value" - assert arg == mock_val - arg = args[0].query - mock_val = "query_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_search_resources_flattened_error_async(): - client = ApiHubAsyncClient( + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{location=projects/*/locations/*}:searchResources" + % client.transport._host, + args[1], + ) + + +def test_search_resources_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.search_resources( + client.search_resources( apihub_service.SearchResourcesRequest(), location="location_value", query="query_value", ) -def test_search_resources_pager(transport_name: str = "grpc"): +def test_search_resources_rest_pager(transport: str = "rest"): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.search_resources), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( apihub_service.SearchResourcesResponse( search_results=[ apihub_service.SearchResult(), @@ -13832,162 +11715,29 @@ def test_search_resources_pager(transport_name: str = "grpc"): apihub_service.SearchResult(), ], ), - RuntimeError, ) + # Two responses for two calls + response = response + response - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("location", ""),)), + # Wrap the values into proper Response objs + response = tuple( + apihub_service.SearchResourcesResponse.to_json(x) for x in response ) - pager = client.search_resources(request={}, retry=retry, timeout=timeout) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"location": "projects/sample1/locations/sample2"} - assert 
pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + pager = client.search_resources(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, apihub_service.SearchResult) for i in results) - -def test_search_resources_pages(transport_name: str = "grpc"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.search_resources), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - apihub_service.SearchResourcesResponse( - search_results=[ - apihub_service.SearchResult(), - apihub_service.SearchResult(), - apihub_service.SearchResult(), - ], - next_page_token="abc", - ), - apihub_service.SearchResourcesResponse( - search_results=[], - next_page_token="def", - ), - apihub_service.SearchResourcesResponse( - search_results=[ - apihub_service.SearchResult(), - ], - next_page_token="ghi", - ), - apihub_service.SearchResourcesResponse( - search_results=[ - apihub_service.SearchResult(), - apihub_service.SearchResult(), - ], - ), - RuntimeError, - ) - pages = list(client.search_resources(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_search_resources_async_pager(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_resources), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.SearchResourcesResponse( - search_results=[ - apihub_service.SearchResult(), - apihub_service.SearchResult(), - apihub_service.SearchResult(), - ], - next_page_token="abc", - ), - apihub_service.SearchResourcesResponse( - search_results=[], - next_page_token="def", - ), - apihub_service.SearchResourcesResponse( - search_results=[ - apihub_service.SearchResult(), - ], - next_page_token="ghi", - ), - apihub_service.SearchResourcesResponse( - search_results=[ - apihub_service.SearchResult(), - apihub_service.SearchResult(), - ], - ), - RuntimeError, - ) - async_pager = await client.search_resources( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, apihub_service.SearchResult) for i in responses) - - -@pytest.mark.asyncio -async def test_search_resources_async_pages(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_resources), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.SearchResourcesResponse( - search_results=[ - apihub_service.SearchResult(), - apihub_service.SearchResult(), - apihub_service.SearchResult(), - ], - next_page_token="abc", - ), - apihub_service.SearchResourcesResponse( - search_results=[], - next_page_token="def", - ), - apihub_service.SearchResourcesResponse( - search_results=[ - apihub_service.SearchResult(), - ], - next_page_token="ghi", - ), - apihub_service.SearchResourcesResponse( - search_results=[ - apihub_service.SearchResult(), - apihub_service.SearchResult(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.search_resources(request={}) - ).pages: - pages.append(page_) + pages = list(client.search_resources(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -13999,35 +11749,115 @@ async def test_search_resources_async_pages(): dict, ], ) -def test_create_external_api(request_type, transport: str = "grpc"): +def test_create_external_api_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["external_api"] = { + "name": "name_value", + "display_name": "display_name_value", + "description": "description_value", + "endpoints": ["endpoints_value1", "endpoints_value2"], + "paths": ["paths_value1", "paths_value2"], + "documentation": {"external_uri": "external_uri_value"}, + "attributes": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_external_api), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.ExternalApi( - name="name_value", - display_name="display_name_value", - description="description_value", - endpoints=["endpoints_value"], + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.CreateExternalApiRequest.meta.fields["external_api"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["external_api"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["external_api"][field])): + del request_init["external_api"][field][i][subfield] + else: + del 
request_init["external_api"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.ExternalApi( + name="name_value", + display_name="display_name_value", + description="description_value", + endpoints=["endpoints_value"], paths=["paths_value"], ) - response = client.create_external_api(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.CreateExternalApiRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.ExternalApi.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_external_api(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.ExternalApi) @@ -14038,66 +11868,13 @@ def test_create_external_api(request_type, transport: str = "grpc"): assert response.paths == ["paths_value"] -def test_create_external_api_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_external_api), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.create_external_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateExternalApiRequest() - - -def test_create_external_api_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.CreateExternalApiRequest( - parent="parent_value", - external_api_id="external_api_id_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_external_api), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.create_external_api(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateExternalApiRequest( - parent="parent_value", - external_api_id="external_api_id_value", - ) - - -def test_create_external_api_use_cached_wrapped_rpc(): +def test_create_external_api_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -14117,6 +11894,7 @@ def test_create_external_api_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.create_external_api ] = mock_rpc + request = {} client.create_external_api(request) @@ -14130,291 +11908,233 @@ def test_create_external_api_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_external_api_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_external_api), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ExternalApi( - name="name_value", - display_name="display_name_value", - description="description_value", - endpoints=["endpoints_value"], - paths=["paths_value"], - ) - ) - response = await client.create_external_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateExternalApiRequest() - - -@pytest.mark.asyncio -async def test_create_external_api_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.create_external_api - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.create_external_api - ] = mock_rpc - - request = {} - await client.create_external_api(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.create_external_api(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_create_external_api_async( - transport: str = "grpc_asyncio", +def test_create_external_api_rest_required_fields( request_type=apihub_service.CreateExternalApiRequest, ): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + transport_class = transports.ApiHubRestTransport - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_external_api), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ExternalApi( - name="name_value", - display_name="display_name_value", - description="description_value", - endpoints=["endpoints_value"], - paths=["paths_value"], - ) - ) - response = await client.create_external_api(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.CreateExternalApiRequest() - assert args[0] == request + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_external_api._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.ExternalApi) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.endpoints == ["endpoints_value"] - assert response.paths == ["paths_value"] + # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" -@pytest.mark.asyncio -async def test_create_external_api_async_from_dict(): - await test_create_external_api_async(request_type=dict) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_external_api._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("external_api_id",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -def test_create_external_api_field_headers(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request = request_type(**request_init) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.CreateExternalApiRequest() - - request.parent = "parent_value" + # Designate an appropriate value for the returned response. + return_value = common_fields.ExternalApi() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_external_api), "__call__" - ) as call: - call.return_value = common_fields.ExternalApi() - client.create_external_api(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.ExternalApi.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_external_api(request) -@pytest.mark.asyncio -async def test_create_external_api_field_headers_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. 
Set these to a non-empty value. - request = apihub_service.CreateExternalApiRequest() - request.parent = "parent_value" +def test_create_external_api_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_external_api), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ExternalApi() + unset_fields = transport.create_external_api._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("externalApiId",)) + & set( + ( + "parent", + "externalApi", + ) ) - await client.create_external_api(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + ) -def test_create_external_api_flattened(): - client = ApiHubClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_external_api_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) - - # Mock the actual call within the gRPC stub, and fake the request. + client = ApiHubClient(transport=transport) with mock.patch.object( - type(client.transport.create_external_api), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.ExternalApi() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.create_external_api( - parent="parent_value", - external_api=common_fields.ExternalApi(name="name_value"), - external_api_id="external_api_id_value", + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_create_external_api" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_create_external_api" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.CreateExternalApiRequest.pb( + apihub_service.CreateExternalApiRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.ExternalApi.to_json( + common_fields.ExternalApi() ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].external_api - mock_val = common_fields.ExternalApi(name="name_value") - assert arg == mock_val - arg = args[0].external_api_id - mock_val = "external_api_id_value" - assert arg == mock_val - - -def test_create_external_api_flattened_error(): + request = apihub_service.CreateExternalApiRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.ExternalApi() + + client.create_external_api( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_external_api_rest_bad_request( + transport: str = "rest", request_type=apihub_service.CreateExternalApiRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_external_api( - apihub_service.CreateExternalApiRequest(), - parent="parent_value", - external_api=common_fields.ExternalApi(name="name_value"), - external_api_id="external_api_id_value", - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_external_api(request) -@pytest.mark.asyncio -async def test_create_external_api_flattened_async(): - client = ApiHubAsyncClient( +def test_create_external_api_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_external_api), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.ExternalApi() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.ExternalApi() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ExternalApi() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.create_external_api( + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", external_api=common_fields.ExternalApi(name="name_value"), external_api_id="external_api_id_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.ExternalApi.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_external_api(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].external_api - mock_val = common_fields.ExternalApi(name="name_value") - assert arg == mock_val - arg = args[0].external_api_id - mock_val = "external_api_id_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_create_external_api_flattened_error_async(): - client = ApiHubAsyncClient( + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/externalApis" + % client.transport._host, + args[1], + ) + + +def test_create_external_api_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.create_external_api( + client.create_external_api( apihub_service.CreateExternalApiRequest(), parent="parent_value", external_api=common_fields.ExternalApi(name="name_value"), @@ -14422,6 +12142,12 @@ async def test_create_external_api_flattened_error_async(): ) +def test_create_external_api_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -14429,33 +12155,37 @@ async def test_create_external_api_flattened_error_async(): dict, ], ) -def test_get_external_api(request_type, transport: str = "grpc"): +def test_get_external_api_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/externalApis/sample3"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_external_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.ExternalApi( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.ExternalApi( name="name_value", display_name="display_name_value", description="description_value", endpoints=["endpoints_value"], paths=["paths_value"], ) - response = client.get_external_api(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.GetExternalApiRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.ExternalApi.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_external_api(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.ExternalApi) @@ -14466,60 +12196,13 @@ def test_get_external_api(request_type, transport: str = "grpc"): assert response.paths == ["paths_value"] -def test_get_external_api_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_external_api), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_external_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetExternalApiRequest() - - -def test_get_external_api_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.GetExternalApiRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_external_api), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_external_api(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetExternalApiRequest( - name="name_value", - ) - - -def test_get_external_api_use_cached_wrapped_rpc(): +def test_get_external_api_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -14537,6 +12220,7 @@ def test_get_external_api_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.get_external_api ] = mock_rpc + request = {} client.get_external_api(request) @@ -14550,265 +12234,233 @@ def test_get_external_api_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_external_api_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_external_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ExternalApi( - name="name_value", - display_name="display_name_value", - description="description_value", - endpoints=["endpoints_value"], - paths=["paths_value"], - ) - ) - response = await client.get_external_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetExternalApiRequest() - - -@pytest.mark.asyncio -async def test_get_external_api_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_get_external_api_rest_required_fields( + request_type=apihub_service.GetExternalApiRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ApiHubRestTransport - # Ensure method has been cached - assert ( - client._client._transport.get_external_api - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_external_api - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.get_external_api(request) + unset_fields = transport_class( 
+ credentials=ga_credentials.AnonymousCredentials() + ).get_external_api._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.get_external_api(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_external_api._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_get_external_api_async( - transport: str = "grpc_asyncio", request_type=apihub_service.GetExternalApiRequest -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.ExternalApi() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_external_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ExternalApi( - name="name_value", - display_name="display_name_value", - description="description_value", - endpoints=["endpoints_value"], - paths=["paths_value"], - ) - ) - response = await client.get_external_api(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.GetExternalApiRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.ExternalApi.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.ExternalApi) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.endpoints == ["endpoints_value"] - assert response.paths == ["paths_value"] + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_external_api(request) -@pytest.mark.asyncio -async def test_get_external_api_async_from_dict(): - await test_get_external_api_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_get_external_api_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_external_api_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetExternalApiRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_external_api), "__call__") as call: - call.return_value = common_fields.ExternalApi() - client.get_external_api(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.get_external_api._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.asyncio -async def test_get_external_api_field_headers_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_external_api_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) + client = ApiHubClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_get_external_api" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_get_external_api" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.GetExternalApiRequest.pb( + apihub_service.GetExternalApiRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetExternalApiRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_external_api), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.ExternalApi.to_json( common_fields.ExternalApi() ) - await client.get_external_api(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + request = apihub_service.GetExternalApiRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.ExternalApi() -def test_get_external_api_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_external_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.ExternalApi() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.get_external_api( - name="name_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_get_external_api_flattened_error(): +def test_get_external_api_rest_bad_request( + transport: str = "rest", request_type=apihub_service.GetExternalApiRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_external_api( - apihub_service.GetExternalApiRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/externalApis/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_external_api(request) -@pytest.mark.asyncio -async def test_get_external_api_flattened_async(): - client = ApiHubAsyncClient( +def test_get_external_api_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_external_api), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.ExternalApi() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = common_fields.ExternalApi() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ExternalApi() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_external_api( + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/externalApis/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.ExternalApi.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_external_api(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/externalApis/*}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_get_external_api_flattened_error_async(): - client = ApiHubAsyncClient( +def test_get_external_api_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_external_api( + client.get_external_api( apihub_service.GetExternalApiRequest(), name="name_value", ) +def test_get_external_api_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -14816,35 +12468,119 @@ async def test_get_external_api_flattened_error_async(): dict, ], ) -def test_update_external_api(request_type, transport: str = "grpc"): +def test_update_external_api_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "external_api": { + "name": "projects/sample1/locations/sample2/externalApis/sample3" + } + } + request_init["external_api"] = { + "name": "projects/sample1/locations/sample2/externalApis/sample3", + "display_name": "display_name_value", + "description": "description_value", + "endpoints": ["endpoints_value1", "endpoints_value2"], + "paths": ["paths_value1", "paths_value2"], + "documentation": {"external_uri": "external_uri_value"}, + "attributes": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_external_api), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = common_fields.ExternalApi( + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.UpdateExternalApiRequest.meta.fields["external_api"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["external_api"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present 
at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["external_api"][field])): + del request_init["external_api"][field][i][subfield] + else: + del request_init["external_api"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.ExternalApi( name="name_value", display_name="display_name_value", description="description_value", endpoints=["endpoints_value"], paths=["paths_value"], ) - response = client.update_external_api(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.UpdateExternalApiRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.ExternalApi.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_external_api(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.ExternalApi) @@ -14855,60 +12591,13 @@ def test_update_external_api(request_type, transport: str = "grpc"): assert response.paths == ["paths_value"] -def test_update_external_api_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_external_api), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_external_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateExternalApiRequest() - - -def test_update_external_api_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.UpdateExternalApiRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_external_api), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.update_external_api(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateExternalApiRequest() - - -def test_update_external_api_use_cached_wrapped_rpc(): +def test_update_external_api_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -14928,6 +12617,7 @@ def test_update_external_api_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.update_external_api ] = mock_rpc + request = {} client.update_external_api(request) @@ -14941,225 +12631,229 @@ def test_update_external_api_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_external_api_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_external_api), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ExternalApi( - name="name_value", - display_name="display_name_value", - description="description_value", - endpoints=["endpoints_value"], - paths=["paths_value"], - ) - ) - response = await client.update_external_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateExternalApiRequest() - - -@pytest.mark.asyncio -async def test_update_external_api_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_update_external_api_rest_required_fields( + request_type=apihub_service.UpdateExternalApiRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.update_external_api - in client._client._transport._wrapped_methods - ) + transport_class = transports.ApiHubRestTransport - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.update_external_api - ] = mock_rpc + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - request = {} - await client.update_external_api(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_external_api._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - await client.update_external_api(request) + # verify required fields with default values are now present - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_external_api._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone -@pytest.mark.asyncio -async def test_update_external_api_async( - transport: str = "grpc_asyncio", - request_type=apihub_service.UpdateExternalApiRequest, -): - client = ApiHubAsyncClient( + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.ExternalApi() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_external_api), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ExternalApi( - name="name_value", - display_name="display_name_value", - description="description_value", - endpoints=["endpoints_value"], - paths=["paths_value"], - ) - ) - response = await client.update_external_api(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.UpdateExternalApiRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.ExternalApi.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.ExternalApi) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.endpoints == ["endpoints_value"] - assert response.paths == ["paths_value"] + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_external_api(request) -@pytest.mark.asyncio -async def test_update_external_api_async_from_dict(): - await test_update_external_api_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_update_external_api_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_update_external_api_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.UpdateExternalApiRequest() + unset_fields = transport.update_external_api._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "externalApi", + "updateMask", + ) + ) + ) - request.external_api.name = "name_value" - # Mock the actual call within the gRPC stub, and fake the request. 
+@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_external_api_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), + ) + client = ApiHubClient(transport=transport) with mock.patch.object( - type(client.transport.update_external_api), "__call__" - ) as call: - call.return_value = common_fields.ExternalApi() - client.update_external_api(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_update_external_api" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_update_external_api" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.UpdateExternalApiRequest.pb( + apihub_service.UpdateExternalApiRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "external_api.name=name_value", - ) in kw["metadata"] + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.ExternalApi.to_json( + common_fields.ExternalApi() + ) + request = apihub_service.UpdateExternalApiRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.ExternalApi() -@pytest.mark.asyncio -async def test_update_external_api_field_headers_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client.update_external_api( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.UpdateExternalApiRequest() + pre.assert_called_once() + post.assert_called_once() - request.external_api.name = "name_value" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_external_api), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ExternalApi() - ) - await client.update_external_api(request) +def test_update_external_api_rest_bad_request( + transport: str = "rest", request_type=apihub_service.UpdateExternalApiRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + # send a request that will satisfy transcoding + request_init = { + "external_api": { + "name": "projects/sample1/locations/sample2/externalApis/sample3" + } + } + request = request_type(**request_init) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "external_api.name=name_value", - ) in kw["metadata"] + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_external_api(request) -def test_update_external_api_flattened(): +def test_update_external_api_rest_flattened(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_external_api), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.ExternalApi() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_external_api( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = common_fields.ExternalApi() + + # get arguments that satisfy an http rule for this method + sample_request = { + "external_api": { + "name": "projects/sample1/locations/sample2/externalApis/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( external_api=common_fields.ExternalApi(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.ExternalApi.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_external_api(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].external_api - mock_val = common_fields.ExternalApi(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{external_api.name=projects/*/locations/*/externalApis/*}" + % client.transport._host, + args[1], + ) -def test_update_external_api_flattened_error(): +def test_update_external_api_rest_flattened_error(transport: str = "rest"): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -15172,56 +12866,11 @@ def test_update_external_api_flattened_error(): ) -@pytest.mark.asyncio -async def test_update_external_api_flattened_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) 
- - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_external_api), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.ExternalApi() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ExternalApi() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_external_api( - external_api=common_fields.ExternalApi(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].external_api - mock_val = common_fields.ExternalApi(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_update_external_api_flattened_error_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_update_external_api_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.update_external_api( - apihub_service.UpdateExternalApiRequest(), - external_api=common_fields.ExternalApi(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - @pytest.mark.parametrize( "request_type", @@ -15230,92 +12879,41 @@ async def test_update_external_api_flattened_error_async(): dict, ], ) -def test_delete_external_api(request_type, transport: str = "grpc"): +def test_delete_external_api_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/externalApis/sample3"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_external_api), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_external_api(request) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.DeleteExternalApiRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_external_api(request) # Establish that the response is the type that we expect. assert response is None -def test_delete_external_api_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_external_api), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_external_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteExternalApiRequest() - - -def test_delete_external_api_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.DeleteExternalApiRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_external_api), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_external_api(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteExternalApiRequest( - name="name_value", - ) - - -def test_delete_external_api_use_cached_wrapped_rpc(): +def test_delete_external_api_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -15335,6 +12933,7 @@ def test_delete_external_api_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.delete_external_api ] = mock_rpc + request = {} client.delete_external_api(request) @@ -15348,253 +12947,220 @@ def test_delete_external_api_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_external_api_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_external_api), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_external_api() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteExternalApiRequest() - - -@pytest.mark.asyncio -async def test_delete_external_api_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.delete_external_api - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.delete_external_api - ] = mock_rpc - - request = {} - await client.delete_external_api(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.delete_external_api(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_delete_external_api_async( - transport: str = "grpc_asyncio", +def test_delete_external_api_rest_required_fields( request_type=apihub_service.DeleteExternalApiRequest, ): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + transport_class = transports.ApiHubRestTransport - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_external_api), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_external_api(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.DeleteExternalApiRequest() - assert args[0] == request + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_external_api._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the response is the type that we expect. 
- assert response is None + # verify required fields with default values are now present + jsonified_request["name"] = "name_value" -@pytest.mark.asyncio -async def test_delete_external_api_async_from_dict(): - await test_delete_external_api_async(request_type=dict) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_external_api._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -def test_delete_external_api_field_headers(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request = request_type(**request_init) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.DeleteExternalApiRequest() + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result - request.name = "name_value" + response_value = Response() + response_value.status_code = 200 + json_return_value = "" - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_external_api), "__call__" - ) as call: - call.return_value = None - client.delete_external_api(request) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.delete_external_api(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -@pytest.mark.asyncio -async def test_delete_external_api_field_headers_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_delete_external_api_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.DeleteExternalApiRequest() + unset_fields = transport.delete_external_api._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) - request.name = "name_value" - # Mock the actual call within the gRPC stub, and fake the request. 
+@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_external_api_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), + ) + client = ApiHubClient(transport=transport) with mock.patch.object( - type(client.transport.delete_external_api), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_external_api(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_delete_external_api" + ) as pre: + pre.assert_not_called() + pb_message = apihub_service.DeleteExternalApiRequest.pb( + apihub_service.DeleteExternalApiRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() -def test_delete_external_api_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.DeleteExternalApiRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_external_api), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.delete_external_api( - name="name_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + pre.assert_called_once() -def test_delete_external_api_flattened_error(): +def test_delete_external_api_rest_bad_request( + transport: str = "rest", request_type=apihub_service.DeleteExternalApiRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_external_api( - apihub_service.DeleteExternalApiRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/externalApis/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_external_api(request) -@pytest.mark.asyncio -async def test_delete_external_api_flattened_async(): - client = ApiHubAsyncClient( +def test_delete_external_api_rest_flattened(): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_external_api), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_external_api( - name="name_value", - ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/externalApis/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_external_api(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/externalApis/*}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_delete_external_api_flattened_error_async(): - client = ApiHubAsyncClient( +def test_delete_external_api_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.delete_external_api( + client.delete_external_api( apihub_service.DeleteExternalApiRequest(), name="name_value", ) +def test_delete_external_api_rest_error(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -15602,97 +13168,46 @@ async def test_delete_external_api_flattened_error_async(): dict, ], ) -def test_list_external_apis(request_type, transport: str = "grpc"): +def test_list_external_apis_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_external_apis), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = apihub_service.ListExternalApisResponse( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListExternalApisResponse( next_page_token="next_page_token_value", ) - response = client.list_external_apis(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.ListExternalApisRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListExternalApisResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_external_apis(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListExternalApisPager) assert response.next_page_token == "next_page_token_value" -def test_list_external_apis_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_external_apis), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.list_external_apis() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListExternalApisRequest() - - -def test_list_external_apis_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.ListExternalApisRequest( - parent="parent_value", - page_token="page_token_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_external_apis), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.list_external_apis(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListExternalApisRequest( - parent="parent_value", - page_token="page_token_value", - ) - - -def test_list_external_apis_use_cached_wrapped_rpc(): +def test_list_external_apis_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -15712,6 +13227,7 @@ def test_list_external_apis_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.list_external_apis ] = mock_rpc + request = {} client.list_external_apis(request) @@ -15725,277 +13241,252 @@ def test_list_external_apis_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_external_apis_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_external_apis), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListExternalApisResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_external_apis() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListExternalApisRequest() - - -@pytest.mark.asyncio -async def test_list_external_apis_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_list_external_apis_rest_required_fields( + request_type=apihub_service.ListExternalApisRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.list_external_apis - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.list_external_apis - ] = mock_rpc - - request = {} - await client.list_external_apis(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.list_external_apis(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - + transport_class = transports.ApiHubRestTransport -@pytest.mark.asyncio -async def test_list_external_apis_async( - transport: str = "grpc_asyncio", request_type=apihub_service.ListExternalApisRequest -): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_external_apis), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListExternalApisResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_external_apis(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.ListExternalApisRequest() - assert args[0] == request + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_external_apis._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListExternalApisAsyncPager) - assert response.next_page_token == "next_page_token_value" + # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" -@pytest.mark.asyncio -async def test_list_external_apis_async_from_dict(): - await test_list_external_apis_async(request_type=dict) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_external_apis._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -def test_list_external_apis_field_headers(): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request = request_type(**request_init) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.ListExternalApisRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_external_apis), "__call__" - ) as call: - call.return_value = apihub_service.ListExternalApisResponse() - client.list_external_apis(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_external_apis_field_headers_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListExternalApisResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.ListExternalApisRequest() + response_value = Response() + response_value.status_code = 200 - request.parent = "parent_value" + # Convert return value to protobuf type + return_value = apihub_service.ListExternalApisResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_external_apis), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListExternalApisResponse() - ) - await client.list_external_apis(request) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.list_external_apis(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_list_external_apis_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_list_external_apis_rest_unset_required_fields(): + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_external_apis), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListExternalApisResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_external_apis( - parent="parent_value", + unset_fields = transport.list_external_apis._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - - -def test_list_external_apis_flattened_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), + & set(("parent",)) ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_external_apis( - apihub_service.ListExternalApisRequest(), - parent="parent_value", - ) - -@pytest.mark.asyncio -async def test_list_external_apis_flattened_async(): - client = ApiHubAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_external_apis_rest_interceptors(null_interceptor): + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), ) - - # Mock the actual call within the gRPC stub, and fake the request. + client = ApiHubClient(transport=transport) with mock.patch.object( - type(client.transport.list_external_apis), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = apihub_service.ListExternalApisResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListExternalApisResponse() + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubRestInterceptor, "post_list_external_apis" + ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "pre_list_external_apis" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.ListExternalApisRequest.pb( + apihub_service.ListExternalApisRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = apihub_service.ListExternalApisResponse.to_json( + apihub_service.ListExternalApisResponse() + ) + + request = apihub_service.ListExternalApisRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = apihub_service.ListExternalApisResponse() + + client.list_external_apis( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.list_external_apis( + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_external_apis_rest_bad_request( + transport: str = "rest", request_type=apihub_service.ListExternalApisRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_external_apis(request) + + +def test_list_external_apis_rest_flattened(): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = apihub_service.ListExternalApisResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListExternalApisResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_external_apis(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/externalApis" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_list_external_apis_flattened_error_async(): - client = ApiHubAsyncClient( +def test_list_external_apis_rest_flattened_error(transport: str = "rest"): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_external_apis( + client.list_external_apis( apihub_service.ListExternalApisRequest(), parent="parent_value", ) -def test_list_external_apis_pager(transport_name: str = "grpc"): +def test_list_external_apis_rest_pager(transport: str = "rest"): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_external_apis), "__call__" - ) as call: - # Set the response to a series of pages. - call.side_effect = ( + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( apihub_service.ListExternalApisResponse( external_apis=[ common_fields.ExternalApi(), @@ -16020,15040 +13511,1132 @@ def test_list_external_apis_pager(transport_name: str = "grpc"): common_fields.ExternalApi(), ], ), - RuntimeError, ) + # Two responses for two calls + response = response + response - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Wrap the values into proper Response objs + response = tuple( + apihub_service.ListExternalApisResponse.to_json(x) for x in response ) - pager = client.list_external_apis(request={}, retry=retry, timeout=timeout) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} - assert 
pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + pager = client.list_external_apis(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, common_fields.ExternalApi) for i in results) + pages = list(client.list_external_apis(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token -def test_list_external_apis_pages(transport_name: str = "grpc"): - client = ApiHubClient( + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_external_apis), "__call__" - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - apihub_service.ListExternalApisResponse( - external_apis=[ - common_fields.ExternalApi(), - common_fields.ExternalApi(), - common_fields.ExternalApi(), - ], - next_page_token="abc", - ), - apihub_service.ListExternalApisResponse( - external_apis=[], - next_page_token="def", - ), - apihub_service.ListExternalApisResponse( - external_apis=[ - common_fields.ExternalApi(), - ], - next_page_token="ghi", - ), - apihub_service.ListExternalApisResponse( - external_apis=[ - common_fields.ExternalApi(), - common_fields.ExternalApi(), - ], - ), - RuntimeError, + with pytest.raises(ValueError): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - pages = list(client.list_external_apis(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_external_apis_async_pager(): - client = ApiHubAsyncClient( + # It 
is an error to provide a credentials file and a transport instance. + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), ) + with pytest.raises(ValueError): + client = ApiHubClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_external_apis), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - apihub_service.ListExternalApisResponse( - external_apis=[ - common_fields.ExternalApi(), - common_fields.ExternalApi(), - common_fields.ExternalApi(), - ], - next_page_token="abc", - ), - apihub_service.ListExternalApisResponse( - external_apis=[], - next_page_token="def", - ), - apihub_service.ListExternalApisResponse( - external_apis=[ - common_fields.ExternalApi(), - ], - next_page_token="ghi", - ), - apihub_service.ListExternalApisResponse( - external_apis=[ - common_fields.ExternalApi(), - common_fields.ExternalApi(), - ], - ), - RuntimeError, + # It is an error to provide an api_key and a transport instance. + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ApiHubClient( + client_options=options, + transport=transport, ) - async_pager = await client.list_external_apis( - request={}, + + # It is an error to provide an api_key and a credential. 
+ options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ApiHubClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - assert len(responses) == 6 - assert all(isinstance(i, common_fields.ExternalApi) for i in responses) + # It is an error to provide scopes and a transport instance. + transport = transports.ApiHubRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ApiHubClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) -@pytest.mark.asyncio -async def test_list_external_apis_async_pages(): - client = ApiHubAsyncClient( +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ApiHubRestTransport( credentials=ga_credentials.AnonymousCredentials(), ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_external_apis), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.ListExternalApisResponse( - external_apis=[ - common_fields.ExternalApi(), - common_fields.ExternalApi(), - common_fields.ExternalApi(), - ], - next_page_token="abc", - ), - apihub_service.ListExternalApisResponse( - external_apis=[], - next_page_token="def", - ), - apihub_service.ListExternalApisResponse( - external_apis=[ - common_fields.ExternalApi(), - ], - next_page_token="ghi", - ), - apihub_service.ListExternalApisResponse( - external_apis=[ - common_fields.ExternalApi(), - common_fields.ExternalApi(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_external_apis(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.CreateApiRequest, - dict, - ], -) -def test_create_api_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["api"] = { - "name": "name_value", - "display_name": "display_name_value", - "description": "description_value", - "documentation": {"external_uri": "external_uri_value"}, - "owner": {"display_name": "display_name_value", "email": "email_value"}, - "versions": ["versions_value1", "versions_value2"], - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "target_user": { - "enum_values": { - "values": [ - { - "id": "id_value", - "display_name": "display_name_value", - "description": "description_value", - "immutable": True, - } - ] - }, - "string_values": {"values": 
["values_value1", "values_value2"]}, - "json_values": {}, - "attribute": "attribute_value", - }, - "team": {}, - "business_unit": {}, - "maturity_level": {}, - "attributes": {}, - "api_style": {}, - "selected_version": "selected_version_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = apihub_service.CreateApiRequest.meta.fields["api"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["api"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result 
and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["api"][field])): - del request_init["api"][field][i][subfield] - else: - del request_init["api"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Api( - name="name_value", - display_name="display_name_value", - description="description_value", - versions=["versions_value"], - selected_version="selected_version_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Api.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.create_api(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Api) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.versions == ["versions_value"] - assert response.selected_version == "selected_version_value" - - -def test_create_api_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_api in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.create_api] = mock_rpc - - request = {} - client.create_api(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_api(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_api_rest_required_fields(request_type=apihub_service.CreateApiRequest): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_api._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_api._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("api_id",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Api() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Api.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.create_api(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_create_api_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.create_api._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("apiId",)) - & set( - ( - "parent", - "api", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_api_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_create_api" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_create_api" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = 
apihub_service.CreateApiRequest.pb( - apihub_service.CreateApiRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Api.to_json(common_fields.Api()) - - request = apihub_service.CreateApiRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Api() - - client.create_api( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_api_rest_bad_request( - transport: str = "rest", request_type=apihub_service.CreateApiRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_api(request) - - -def test_create_api_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = common_fields.Api() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - api=common_fields.Api(name="name_value"), - api_id="api_id_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Api.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.create_api(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/apis" % client.transport._host, - args[1], - ) - - -def test_create_api_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_api( - apihub_service.CreateApiRequest(), - parent="parent_value", - api=common_fields.Api(name="name_value"), - api_id="api_id_value", - ) - - -def test_create_api_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.GetApiRequest, - dict, - ], -) -def test_get_api_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/apis/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Api( - name="name_value", - display_name="display_name_value", - description="description_value", - versions=["versions_value"], - selected_version="selected_version_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Api.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_api(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Api) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.versions == ["versions_value"] - assert response.selected_version == "selected_version_value" - - -def test_get_api_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_api in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.get_api] = mock_rpc - - request = {} - client.get_api(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_api(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_api_rest_required_fields(request_type=apihub_service.GetApiRequest): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_api._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_api._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Api() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Api.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_api(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_api_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_api._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_api_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_get_api" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_get_api" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.GetApiRequest.pb(apihub_service.GetApiRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = 
PreparedRequest() - req.return_value._content = common_fields.Api.to_json(common_fields.Api()) - - request = apihub_service.GetApiRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Api() - - client.get_api( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_api_rest_bad_request( - transport: str = "rest", request_type=apihub_service.GetApiRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/apis/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_api(request) - - -def test_get_api_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = common_fields.Api() - - # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/apis/sample3"} - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Api.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_api(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/apis/*}" % client.transport._host, - args[1], - ) - - -def test_get_api_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_api( - apihub_service.GetApiRequest(), - name="name_value", - ) - - -def test_get_api_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.ListApisRequest, - dict, - ], -) -def test_list_apis_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListApisResponse( - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListApisResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_apis(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListApisPager) - assert response.next_page_token == "next_page_token_value" - - -def test_list_apis_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_apis in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.list_apis] = mock_rpc - - request = {} - client.list_apis(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_apis(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_apis_rest_required_fields(request_type=apihub_service.ListApisRequest): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_apis._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_apis._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListApisResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = apihub_service.ListApisResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_apis(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_apis_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_apis._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_apis_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - 
transports.ApiHubRestInterceptor, "post_list_apis" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_list_apis" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.ListApisRequest.pb(apihub_service.ListApisRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = apihub_service.ListApisResponse.to_json( - apihub_service.ListApisResponse() - ) - - request = apihub_service.ListApisRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = apihub_service.ListApisResponse() - - client.list_apis( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_apis_rest_bad_request( - transport: str = "rest", request_type=apihub_service.ListApisRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_apis(request) - - -def test_list_apis_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListApisResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListApisResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_apis(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/apis" % client.transport._host, - args[1], - ) - - -def test_list_apis_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_apis( - apihub_service.ListApisRequest(), - parent="parent_value", - ) - - -def test_list_apis_rest_pager(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - apihub_service.ListApisResponse( - apis=[ - common_fields.Api(), - common_fields.Api(), - common_fields.Api(), - ], - next_page_token="abc", - ), - apihub_service.ListApisResponse( - apis=[], - next_page_token="def", - ), - apihub_service.ListApisResponse( - apis=[ - common_fields.Api(), - ], - next_page_token="ghi", - ), - apihub_service.ListApisResponse( - apis=[ - common_fields.Api(), - common_fields.Api(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(apihub_service.ListApisResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_apis(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, common_fields.Api) for i in results) - - pages = list(client.list_apis(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.UpdateApiRequest, - dict, - ], -) -def test_update_api_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"api": {"name": "projects/sample1/locations/sample2/apis/sample3"}} - request_init["api"] = { - "name": "projects/sample1/locations/sample2/apis/sample3", - "display_name": "display_name_value", - "description": "description_value", - "documentation": {"external_uri": 
"external_uri_value"}, - "owner": {"display_name": "display_name_value", "email": "email_value"}, - "versions": ["versions_value1", "versions_value2"], - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "target_user": { - "enum_values": { - "values": [ - { - "id": "id_value", - "display_name": "display_name_value", - "description": "description_value", - "immutable": True, - } - ] - }, - "string_values": {"values": ["values_value1", "values_value2"]}, - "json_values": {}, - "attribute": "attribute_value", - }, - "team": {}, - "business_unit": {}, - "maturity_level": {}, - "attributes": {}, - "api_style": {}, - "selected_version": "selected_version_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = apihub_service.UpdateApiRequest.meta.fields["api"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["api"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["api"][field])): - del request_init["api"][field][i][subfield] - else: - del 
request_init["api"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Api( - name="name_value", - display_name="display_name_value", - description="description_value", - versions=["versions_value"], - selected_version="selected_version_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Api.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.update_api(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.Api) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.versions == ["versions_value"] - assert response.selected_version == "selected_version_value" - - -def test_update_api_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_api in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # 
operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.update_api] = mock_rpc - - request = {} - client.update_api(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_api(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_api_rest_required_fields(request_type=apihub_service.UpdateApiRequest): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_api._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_api._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Api() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "patch", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Api.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.update_api(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_update_api_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.update_api._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "api", - "updateMask", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_api_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, 
mock.patch.object( - transports.ApiHubRestInterceptor, "post_update_api" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_update_api" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.UpdateApiRequest.pb( - apihub_service.UpdateApiRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Api.to_json(common_fields.Api()) - - request = apihub_service.UpdateApiRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Api() - - client.update_api( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_api_rest_bad_request( - transport: str = "rest", request_type=apihub_service.UpdateApiRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"api": {"name": "projects/sample1/locations/sample2/apis/sample3"}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_api(request) - - -def test_update_api_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Api() - - # get arguments that satisfy an http rule for this method - sample_request = { - "api": {"name": "projects/sample1/locations/sample2/apis/sample3"} - } - - # get truthy value for each flattened field - mock_args = dict( - api=common_fields.Api(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Api.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.update_api(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{api.name=projects/*/locations/*/apis/*}" % client.transport._host, - args[1], - ) - - -def test_update_api_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_api( - apihub_service.UpdateApiRequest(), - api=common_fields.Api(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - -def test_update_api_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.DeleteApiRequest, - dict, - ], -) -def test_delete_api_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/apis/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_api(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_api_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_api in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.delete_api] = mock_rpc - - request = {} - client.delete_api(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_api(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_api_rest_required_fields(request_type=apihub_service.DeleteApiRequest): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_api._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - 
credentials=ga_credentials.AnonymousCredentials() - ).delete_api._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("force",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_api(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_delete_api_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.delete_api._get_unset_required_fields({}) - assert set(unset_fields) == (set(("force",)) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_api_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_delete_api" - ) as pre: - pre.assert_not_called() - pb_message = apihub_service.DeleteApiRequest.pb( - apihub_service.DeleteApiRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = apihub_service.DeleteApiRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_api( - request, - 
metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - - -def test_delete_api_rest_bad_request( - transport: str = "rest", request_type=apihub_service.DeleteApiRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/apis/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_api(request) - - -def test_delete_api_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/apis/sample3"} - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.delete_api(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/apis/*}" % client.transport._host, - args[1], - ) - - -def test_delete_api_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_api( - apihub_service.DeleteApiRequest(), - name="name_value", - ) - - -def test_delete_api_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.CreateVersionRequest, - dict, - ], -) -def test_create_version_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/apis/sample3"} - request_init["version"] = { - "name": "name_value", - "display_name": "display_name_value", - "description": "description_value", - "documentation": {"external_uri": "external_uri_value"}, - "specs": ["specs_value1", "specs_value2"], - "api_operations": ["api_operations_value1", "api_operations_value2"], - "definitions": ["definitions_value1", "definitions_value2"], - "deployments": ["deployments_value1", "deployments_value2"], - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "lifecycle": { - "enum_values": { - "values": [ - { - "id": "id_value", - "display_name": "display_name_value", - "description": "description_value", - "immutable": True, - } - ] - }, - "string_values": {"values": ["values_value1", "values_value2"]}, - "json_values": {}, - "attribute": "attribute_value", - }, - "compliance": {}, - "accreditation": {}, - "attributes": {}, - 
"selected_deployment": "selected_deployment_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = apihub_service.CreateVersionRequest.meta.fields["version"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["version"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - 
"subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["version"][field])): - del request_init["version"][field][i][subfield] - else: - del request_init["version"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Version( - name="name_value", - display_name="display_name_value", - description="description_value", - specs=["specs_value"], - api_operations=["api_operations_value"], - definitions=["definitions_value"], - deployments=["deployments_value"], - selected_deployment="selected_deployment_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Version.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.create_version(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Version) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.specs == ["specs_value"] - assert response.api_operations == ["api_operations_value"] - assert response.definitions == ["definitions_value"] - assert response.deployments == ["deployments_value"] - assert response.selected_deployment == "selected_deployment_value" - - -def test_create_version_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_version in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.create_version] = mock_rpc - - request = {} - client.create_version(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_version(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_version_rest_required_fields( - request_type=apihub_service.CreateVersionRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_version._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_version._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("version_id",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Version() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Version.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.create_version(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_create_version_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.create_version._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("versionId",)) - & set( - ( - "parent", - "version", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_version_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_create_version" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_create_version" - ) as pre: - pre.assert_not_called() - 
post.assert_not_called() - pb_message = apihub_service.CreateVersionRequest.pb( - apihub_service.CreateVersionRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Version.to_json( - common_fields.Version() - ) - - request = apihub_service.CreateVersionRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Version() - - client.create_version( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_version_rest_bad_request( - transport: str = "rest", request_type=apihub_service.CreateVersionRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/apis/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_version(request) - - -def test_create_version_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = common_fields.Version() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2/apis/sample3"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - version=common_fields.Version(name="name_value"), - version_id="version_id_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Version.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.create_version(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/apis/*}/versions" - % client.transport._host, - args[1], - ) - - -def test_create_version_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_version( - apihub_service.CreateVersionRequest(), - parent="parent_value", - version=common_fields.Version(name="name_value"), - version_id="version_id_value", - ) - - -def test_create_version_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.GetVersionRequest, - dict, - ], -) -def test_get_version_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Version( - name="name_value", - display_name="display_name_value", - description="description_value", - specs=["specs_value"], - api_operations=["api_operations_value"], - definitions=["definitions_value"], - deployments=["deployments_value"], - selected_deployment="selected_deployment_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Version.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_version(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Version) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.specs == ["specs_value"] - assert response.api_operations == ["api_operations_value"] - assert response.definitions == ["definitions_value"] - assert response.deployments == ["deployments_value"] - assert response.selected_deployment == "selected_deployment_value" - - -def test_get_version_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_version in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.get_version] = mock_rpc - - request = {} - client.get_version(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_version(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_version_rest_required_fields( - request_type=apihub_service.GetVersionRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_version._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_version._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Version() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Version.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_version(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_version_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_version._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_version_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_get_version" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_get_version" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.GetVersionRequest.pb( - apihub_service.GetVersionRequest() - ) - 
transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Version.to_json( - common_fields.Version() - ) - - request = apihub_service.GetVersionRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Version() - - client.get_version( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_version_rest_bad_request( - transport: str = "rest", request_type=apihub_service.GetVersionRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_version(request) - - -def test_get_version_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = common_fields.Version() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Version.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_version(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/apis/*/versions/*}" - % client.transport._host, - args[1], - ) - - -def test_get_version_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_version( - apihub_service.GetVersionRequest(), - name="name_value", - ) - - -def test_get_version_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.ListVersionsRequest, - dict, - ], -) -def test_list_versions_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/apis/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListVersionsResponse( - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListVersionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_versions(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListVersionsPager) - assert response.next_page_token == "next_page_token_value" - - -def test_list_versions_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_versions in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.list_versions] = mock_rpc - - request = {} - client.list_versions(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_versions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_versions_rest_required_fields( - request_type=apihub_service.ListVersionsRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_versions._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_versions._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListVersionsResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = apihub_service.ListVersionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_versions(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_versions_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_versions._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_versions_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, 
mock.patch.object( - transports.ApiHubRestInterceptor, "post_list_versions" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_list_versions" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.ListVersionsRequest.pb( - apihub_service.ListVersionsRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = apihub_service.ListVersionsResponse.to_json( - apihub_service.ListVersionsResponse() - ) - - request = apihub_service.ListVersionsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = apihub_service.ListVersionsResponse() - - client.list_versions( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_versions_rest_bad_request( - transport: str = "rest", request_type=apihub_service.ListVersionsRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/apis/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_versions(request) - - -def test_list_versions_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListVersionsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2/apis/sample3"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListVersionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_versions(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/apis/*}/versions" - % client.transport._host, - args[1], - ) - - -def test_list_versions_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_versions( - apihub_service.ListVersionsRequest(), - parent="parent_value", - ) - - -def test_list_versions_rest_pager(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - apihub_service.ListVersionsResponse( - versions=[ - common_fields.Version(), - common_fields.Version(), - common_fields.Version(), - ], - next_page_token="abc", - ), - apihub_service.ListVersionsResponse( - versions=[], - next_page_token="def", - ), - apihub_service.ListVersionsResponse( - versions=[ - common_fields.Version(), - ], - next_page_token="ghi", - ), - apihub_service.ListVersionsResponse( - versions=[ - common_fields.Version(), - common_fields.Version(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - apihub_service.ListVersionsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - 
req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2/apis/sample3"} - - pager = client.list_versions(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, common_fields.Version) for i in results) - - pages = list(client.list_versions(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.UpdateVersionRequest, - dict, - ], -) -def test_update_version_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "version": { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - } - request_init["version"] = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4", - "display_name": "display_name_value", - "description": "description_value", - "documentation": {"external_uri": "external_uri_value"}, - "specs": ["specs_value1", "specs_value2"], - "api_operations": ["api_operations_value1", "api_operations_value2"], - "definitions": ["definitions_value1", "definitions_value2"], - "deployments": ["deployments_value1", "deployments_value2"], - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "lifecycle": { - "enum_values": { - "values": [ - { - "id": "id_value", - "display_name": "display_name_value", - "description": "description_value", - "immutable": True, - } - ] - }, - "string_values": {"values": ["values_value1", "values_value2"]}, - "json_values": {}, - "attribute": "attribute_value", - }, - "compliance": {}, - "accreditation": {}, - "attributes": {}, - "selected_deployment": "selected_deployment_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = apihub_service.UpdateVersionRequest.meta.fields["version"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["version"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # 
Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["version"][field])): - del request_init["version"][field][i][subfield] - else: - del request_init["version"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Version( - name="name_value", - display_name="display_name_value", - description="description_value", - specs=["specs_value"], - api_operations=["api_operations_value"], - definitions=["definitions_value"], - deployments=["deployments_value"], - selected_deployment="selected_deployment_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Version.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.update_version(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Version) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.specs == ["specs_value"] - assert response.api_operations == ["api_operations_value"] - assert response.definitions == ["definitions_value"] - assert response.deployments == ["deployments_value"] - assert response.selected_deployment == "selected_deployment_value" - - -def test_update_version_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_version in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.update_version] = mock_rpc - - request = {} - client.update_version(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.update_version(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_version_rest_required_fields( - request_type=apihub_service.UpdateVersionRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_version._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_version._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Version() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "patch", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Version.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.update_version(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_update_version_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.update_version._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "version", - "updateMask", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_version_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_update_version" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_update_version" - ) as pre: - pre.assert_not_called() - 
post.assert_not_called() - pb_message = apihub_service.UpdateVersionRequest.pb( - apihub_service.UpdateVersionRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Version.to_json( - common_fields.Version() - ) - - request = apihub_service.UpdateVersionRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Version() - - client.update_version( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_version_rest_bad_request( - transport: str = "rest", request_type=apihub_service.UpdateVersionRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "version": { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_version(request) - - -def test_update_version_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Version() - - # get arguments that satisfy an http rule for this method - sample_request = { - "version": { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - } - - # get truthy value for each flattened field - mock_args = dict( - version=common_fields.Version(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Version.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.update_version(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{version.name=projects/*/locations/*/apis/*/versions/*}" - % client.transport._host, - args[1], - ) - - -def test_update_version_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_version( - apihub_service.UpdateVersionRequest(), - version=common_fields.Version(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - -def test_update_version_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.DeleteVersionRequest, - dict, - ], -) -def test_delete_version_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_version(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_version_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_version in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.delete_version] = mock_rpc - - request = {} - client.delete_version(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_version(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_version_rest_required_fields( - request_type=apihub_service.DeleteVersionRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_version._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - 
credentials=ga_credentials.AnonymousCredentials() - ).delete_version._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("force",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_version(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_delete_version_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.delete_version._get_unset_required_fields({}) - assert set(unset_fields) == (set(("force",)) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_version_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_delete_version" - ) as pre: - pre.assert_not_called() - pb_message = apihub_service.DeleteVersionRequest.pb( - apihub_service.DeleteVersionRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = apihub_service.DeleteVersionRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - 
client.delete_version( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - - -def test_delete_version_rest_bad_request( - transport: str = "rest", request_type=apihub_service.DeleteVersionRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_version(request) - - -def test_delete_version_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.delete_version(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/apis/*/versions/*}" - % client.transport._host, - args[1], - ) - - -def test_delete_version_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_version( - apihub_service.DeleteVersionRequest(), - name="name_value", - ) - - -def test_delete_version_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.CreateSpecRequest, - dict, - ], -) -def test_create_spec_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - request_init["spec"] = { - "name": "name_value", - "display_name": "display_name_value", - "spec_type": { - "enum_values": { - "values": [ - { - "id": "id_value", - "display_name": "display_name_value", - "description": "description_value", - "immutable": True, - } - ] - }, - "string_values": {"values": ["values_value1", "values_value2"]}, - "json_values": {}, - "attribute": "attribute_value", - }, - "contents": {"contents": b"contents_blob", "mime_type": "mime_type_value"}, - "details": { - "open_api_spec_details": { - "format_": 1, - "version": "version_value", - "owner": {"display_name": "display_name_value", "email": "email_value"}, - }, - "description": "description_value", - }, - "source_uri": "source_uri_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "lint_response": { - "issues": [ - { - "code": 
"code_value", - "path": ["path_value1", "path_value2"], - "message": "message_value", - "severity": 1, - "range_": {"start": {"line": 424, "character": 941}, "end": {}}, - } - ], - "summary": [{"severity": 1, "count": 553}], - "state": 1, - "source": "source_value", - "linter": 1, - "create_time": {}, - }, - "attributes": {}, - "documentation": {"external_uri": "external_uri_value"}, - "parsing_mode": 1, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = apihub_service.CreateSpecRequest.meta.fields["spec"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["spec"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["spec"][field])): - del request_init["spec"][field][i][subfield] - else: - del 
request_init["spec"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Spec( - name="name_value", - display_name="display_name_value", - source_uri="source_uri_value", - parsing_mode=common_fields.Spec.ParsingMode.RELAXED, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Spec.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.create_spec(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.Spec) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.source_uri == "source_uri_value" - assert response.parsing_mode == common_fields.Spec.ParsingMode.RELAXED - - -def test_create_spec_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_spec in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client._transport._wrapped_methods[client._transport.create_spec] = mock_rpc - - request = {} - client.create_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_spec(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_spec_rest_required_fields( - request_type=apihub_service.CreateSpecRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_spec._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_spec._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("spec_id",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Spec() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Spec.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.create_spec(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_create_spec_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.create_spec._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("specId",)) - & set( - ( - "parent", - "spec", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_spec_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, 
mock.patch.object( - transports.ApiHubRestInterceptor, "post_create_spec" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_create_spec" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.CreateSpecRequest.pb( - apihub_service.CreateSpecRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Spec.to_json(common_fields.Spec()) - - request = apihub_service.CreateSpecRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Spec() - - client.create_spec( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_spec_rest_bad_request( - transport: str = "rest", request_type=apihub_service.CreateSpecRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_spec(request) - - -def test_create_spec_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Spec() - - # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - spec=common_fields.Spec(name="name_value"), - spec_id="spec_id_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Spec.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.create_spec(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/apis/*/versions/*}/specs" - % client.transport._host, - args[1], - ) - - -def test_create_spec_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_spec( - apihub_service.CreateSpecRequest(), - parent="parent_value", - spec=common_fields.Spec(name="name_value"), - spec_id="spec_id_value", - ) - - -def test_create_spec_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.GetSpecRequest, - dict, - ], -) -def test_get_spec_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = common_fields.Spec( - name="name_value", - display_name="display_name_value", - source_uri="source_uri_value", - parsing_mode=common_fields.Spec.ParsingMode.RELAXED, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Spec.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_spec(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.Spec) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.source_uri == "source_uri_value" - assert response.parsing_mode == common_fields.Spec.ParsingMode.RELAXED - - -def test_get_spec_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_spec in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.get_spec] = mock_rpc - - request = {} - client.get_spec(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_spec(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_spec_rest_required_fields(request_type=apihub_service.GetSpecRequest): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_spec._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_spec._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Spec() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Spec.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_spec(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_spec_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_spec._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_spec_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_get_spec" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_get_spec" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.GetSpecRequest.pb(apihub_service.GetSpecRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - 
req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Spec.to_json(common_fields.Spec()) - - request = apihub_service.GetSpecRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Spec() - - client.get_spec( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_spec_rest_bad_request( - transport: str = "rest", request_type=apihub_service.GetSpecRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_spec(request) - - -def test_get_spec_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = common_fields.Spec() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Spec.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_spec(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/apis/*/versions/*/specs/*}" - % client.transport._host, - args[1], - ) - - -def test_get_spec_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_spec( - apihub_service.GetSpecRequest(), - name="name_value", - ) - - -def test_get_spec_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.GetSpecContentsRequest, - dict, - ], -) -def test_get_spec_contents_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.SpecContents( - contents=b"contents_blob", - mime_type="mime_type_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.SpecContents.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_spec_contents(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.SpecContents) - assert response.contents == b"contents_blob" - assert response.mime_type == "mime_type_value" - - -def test_get_spec_contents_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_spec_contents in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.get_spec_contents - ] = mock_rpc - - request = {} - client.get_spec_contents(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_spec_contents(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_spec_contents_rest_required_fields( - request_type=apihub_service.GetSpecContentsRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_spec_contents._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_spec_contents._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.SpecContents() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.SpecContents.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_spec_contents(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_spec_contents_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_spec_contents._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_spec_contents_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_get_spec_contents" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_get_spec_contents" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.GetSpecContentsRequest.pb( 
- apihub_service.GetSpecContentsRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.SpecContents.to_json( - common_fields.SpecContents() - ) - - request = apihub_service.GetSpecContentsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.SpecContents() - - client.get_spec_contents( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_spec_contents_rest_bad_request( - transport: str = "rest", request_type=apihub_service.GetSpecContentsRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_spec_contents(request) - - -def test_get_spec_contents_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = common_fields.SpecContents() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.SpecContents.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_spec_contents(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/apis/*/versions/*/specs/*}:contents" - % client.transport._host, - args[1], - ) - - -def test_get_spec_contents_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_spec_contents( - apihub_service.GetSpecContentsRequest(), - name="name_value", - ) - - -def test_get_spec_contents_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.ListSpecsRequest, - dict, - ], -) -def test_list_specs_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListSpecsResponse( - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListSpecsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_specs(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListSpecsPager) - assert response.next_page_token == "next_page_token_value" - - -def test_list_specs_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_specs in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.list_specs] = mock_rpc - - request = {} - client.list_specs(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_specs(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_specs_rest_required_fields(request_type=apihub_service.ListSpecsRequest): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_specs._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_specs._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListSpecsResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = apihub_service.ListSpecsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_specs(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_specs_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_specs._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_specs_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - 
transports.ApiHubRestInterceptor, "post_list_specs" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_list_specs" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.ListSpecsRequest.pb( - apihub_service.ListSpecsRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = apihub_service.ListSpecsResponse.to_json( - apihub_service.ListSpecsResponse() - ) - - request = apihub_service.ListSpecsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = apihub_service.ListSpecsResponse() - - client.list_specs( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_specs_rest_bad_request( - transport: str = "rest", request_type=apihub_service.ListSpecsRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_specs(request) - - -def test_list_specs_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListSpecsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListSpecsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_specs(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/apis/*/versions/*}/specs" - % client.transport._host, - args[1], - ) - - -def test_list_specs_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_specs( - apihub_service.ListSpecsRequest(), - parent="parent_value", - ) - - -def test_list_specs_rest_pager(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - apihub_service.ListSpecsResponse( - specs=[ - common_fields.Spec(), - common_fields.Spec(), - common_fields.Spec(), - ], - next_page_token="abc", - ), - apihub_service.ListSpecsResponse( - specs=[], - next_page_token="def", - ), - apihub_service.ListSpecsResponse( - specs=[ - common_fields.Spec(), - ], - next_page_token="ghi", - ), - apihub_service.ListSpecsResponse( - specs=[ - common_fields.Spec(), - common_fields.Spec(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(apihub_service.ListSpecsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - 
"parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - - pager = client.list_specs(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, common_fields.Spec) for i in results) - - pages = list(client.list_specs(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.UpdateSpecRequest, - dict, - ], -) -def test_update_spec_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "spec": { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" - } - } - request_init["spec"] = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5", - "display_name": "display_name_value", - "spec_type": { - "enum_values": { - "values": [ - { - "id": "id_value", - "display_name": "display_name_value", - "description": "description_value", - "immutable": True, - } - ] - }, - "string_values": {"values": ["values_value1", "values_value2"]}, - "json_values": {}, - "attribute": "attribute_value", - }, - "contents": {"contents": b"contents_blob", "mime_type": "mime_type_value"}, - "details": { - "open_api_spec_details": { - "format_": 1, - "version": "version_value", - "owner": {"display_name": "display_name_value", "email": "email_value"}, - }, - "description": "description_value", - }, - "source_uri": "source_uri_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "lint_response": { - "issues": [ - { - "code": "code_value", - "path": ["path_value1", "path_value2"], - "message": "message_value", - "severity": 1, - "range_": {"start": {"line": 424, "character": 941}, "end": {}}, - } - ], - "summary": [{"severity": 1, "count": 
553}], - "state": 1, - "source": "source_value", - "linter": 1, - "create_time": {}, - }, - "attributes": {}, - "documentation": {"external_uri": "external_uri_value"}, - "parsing_mode": 1, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = apihub_service.UpdateSpecRequest.meta.fields["spec"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["spec"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in 
result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["spec"][field])): - del request_init["spec"][field][i][subfield] - else: - del request_init["spec"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Spec( - name="name_value", - display_name="display_name_value", - source_uri="source_uri_value", - parsing_mode=common_fields.Spec.ParsingMode.RELAXED, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Spec.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.update_spec(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Spec) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.source_uri == "source_uri_value" - assert response.parsing_mode == common_fields.Spec.ParsingMode.RELAXED - - -def test_update_spec_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_spec in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.update_spec] = mock_rpc - - request = {} - client.update_spec(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.update_spec(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_spec_rest_required_fields( - request_type=apihub_service.UpdateSpecRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_spec._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_spec._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Spec() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "patch", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Spec.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.update_spec(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_update_spec_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.update_spec._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "spec", - "updateMask", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_spec_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_update_spec" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_update_spec" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.UpdateSpecRequest.pb( - apihub_service.UpdateSpecRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - 
"query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Spec.to_json(common_fields.Spec()) - - request = apihub_service.UpdateSpecRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Spec() - - client.update_spec( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_spec_rest_bad_request( - transport: str = "rest", request_type=apihub_service.UpdateSpecRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "spec": { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" - } - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_spec(request) - - -def test_update_spec_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = common_fields.Spec() - - # get arguments that satisfy an http rule for this method - sample_request = { - "spec": { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" - } - } - - # get truthy value for each flattened field - mock_args = dict( - spec=common_fields.Spec(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Spec.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.update_spec(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{spec.name=projects/*/locations/*/apis/*/versions/*/specs/*}" - % client.transport._host, - args[1], - ) - - -def test_update_spec_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_spec( - apihub_service.UpdateSpecRequest(), - spec=common_fields.Spec(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - -def test_update_spec_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.DeleteSpecRequest, - dict, - ], -) -def test_delete_spec_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_spec(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_spec_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_spec in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.delete_spec] = mock_rpc - - request = {} - client.delete_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_spec(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_spec_rest_required_fields( - request_type=apihub_service.DeleteSpecRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_spec._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - 
credentials=ga_credentials.AnonymousCredentials() - ).delete_spec._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_spec(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_delete_spec_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.delete_spec._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_spec_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_delete_spec" - ) as pre: - pre.assert_not_called() - pb_message = apihub_service.DeleteSpecRequest.pb( - apihub_service.DeleteSpecRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = apihub_service.DeleteSpecRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_spec( - request, - 
metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - - -def test_delete_spec_rest_bad_request( - transport: str = "rest", request_type=apihub_service.DeleteSpecRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_spec(request) - - -def test_delete_spec_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.delete_spec(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/apis/*/versions/*/specs/*}" - % client.transport._host, - args[1], - ) - - -def test_delete_spec_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_spec( - apihub_service.DeleteSpecRequest(), - name="name_value", - ) - - -def test_delete_spec_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.GetApiOperationRequest, - dict, - ], -) -def test_get_api_operation_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/operations/sample5" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = common_fields.ApiOperation( - name="name_value", - spec="spec_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.ApiOperation.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_api_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.ApiOperation) - assert response.name == "name_value" - assert response.spec == "spec_value" - - -def test_get_api_operation_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_api_operation in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.get_api_operation - ] = mock_rpc - - request = {} - client.get_api_operation(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_api_operation(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_api_operation_rest_required_fields( - request_type=apihub_service.GetApiOperationRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_api_operation._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_api_operation._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.ApiOperation() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.ApiOperation.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_api_operation(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_api_operation_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_api_operation._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_api_operation_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_get_api_operation" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_get_api_operation" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.GetApiOperationRequest.pb( 
- apihub_service.GetApiOperationRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.ApiOperation.to_json( - common_fields.ApiOperation() - ) - - request = apihub_service.GetApiOperationRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.ApiOperation() - - client.get_api_operation( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_api_operation_rest_bad_request( - transport: str = "rest", request_type=apihub_service.GetApiOperationRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/operations/sample5" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_api_operation(request) - - -def test_get_api_operation_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = common_fields.ApiOperation() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/operations/sample5" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.ApiOperation.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_api_operation(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/apis/*/versions/*/operations/*}" - % client.transport._host, - args[1], - ) - - -def test_get_api_operation_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_api_operation( - apihub_service.GetApiOperationRequest(), - name="name_value", - ) - - -def test_get_api_operation_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.ListApiOperationsRequest, - dict, - ], -) -def test_list_api_operations_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListApiOperationsResponse( - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListApiOperationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_api_operations(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListApiOperationsPager) - assert response.next_page_token == "next_page_token_value" - - -def test_list_api_operations_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.list_api_operations in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.list_api_operations - ] = mock_rpc - - request = {} - client.list_api_operations(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_api_operations(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_api_operations_rest_required_fields( - request_type=apihub_service.ListApiOperationsRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_api_operations._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_api_operations._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListApiOperationsResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = apihub_service.ListApiOperationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_api_operations(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_api_operations_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_api_operations._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_api_operations_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, 
"transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_list_api_operations" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_list_api_operations" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.ListApiOperationsRequest.pb( - apihub_service.ListApiOperationsRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = apihub_service.ListApiOperationsResponse.to_json( - apihub_service.ListApiOperationsResponse() - ) - - request = apihub_service.ListApiOperationsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = apihub_service.ListApiOperationsResponse() - - client.list_api_operations( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_api_operations_rest_bad_request( - transport: str = "rest", request_type=apihub_service.ListApiOperationsRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_api_operations(request) - - -def test_list_api_operations_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListApiOperationsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListApiOperationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_api_operations(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/apis/*/versions/*}/operations" - % client.transport._host, - args[1], - ) - - -def test_list_api_operations_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_api_operations( - apihub_service.ListApiOperationsRequest(), - parent="parent_value", - ) - - -def test_list_api_operations_rest_pager(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - apihub_service.ListApiOperationsResponse( - api_operations=[ - common_fields.ApiOperation(), - common_fields.ApiOperation(), - common_fields.ApiOperation(), - ], - next_page_token="abc", - ), - apihub_service.ListApiOperationsResponse( - api_operations=[], - next_page_token="def", - ), - apihub_service.ListApiOperationsResponse( - api_operations=[ - common_fields.ApiOperation(), - ], - next_page_token="ghi", - ), - apihub_service.ListApiOperationsResponse( - api_operations=[ - common_fields.ApiOperation(), - common_fields.ApiOperation(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - apihub_service.ListApiOperationsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": "projects/sample1/locations/sample2/apis/sample3/versions/sample4" - } - - pager = client.list_api_operations(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, common_fields.ApiOperation) for i in results) - - pages = list(client.list_api_operations(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.GetDefinitionRequest, - dict, - ], -) -def test_get_definition_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "name": 
"projects/sample1/locations/sample2/apis/sample3/versions/sample4/definitions/sample5" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Definition( - name="name_value", - spec="spec_value", - type_=common_fields.Definition.Type.SCHEMA, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Definition.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_definition(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.Definition) - assert response.name == "name_value" - assert response.spec == "spec_value" - assert response.type_ == common_fields.Definition.Type.SCHEMA - - -def test_get_definition_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_definition in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client._transport._wrapped_methods[client._transport.get_definition] = mock_rpc - - request = {} - client.get_definition(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_definition(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_definition_rest_required_fields( - request_type=apihub_service.GetDefinitionRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_definition._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_definition._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Definition() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Definition.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_definition(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_definition_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_definition._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_definition_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, 
"post_get_definition" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_get_definition" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.GetDefinitionRequest.pb( - apihub_service.GetDefinitionRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Definition.to_json( - common_fields.Definition() - ) - - request = apihub_service.GetDefinitionRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Definition() - - client.get_definition( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_definition_rest_bad_request( - transport: str = "rest", request_type=apihub_service.GetDefinitionRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/definitions/sample5" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_definition(request) - - -def test_get_definition_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Definition() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/definitions/sample5" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Definition.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_definition(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/apis/*/versions/*/definitions/*}" - % client.transport._host, - args[1], - ) - - -def test_get_definition_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_definition( - apihub_service.GetDefinitionRequest(), - name="name_value", - ) - - -def test_get_definition_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.CreateDeploymentRequest, - dict, - ], -) -def test_create_deployment_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["deployment"] = { - "name": "name_value", - "display_name": "display_name_value", - "description": "description_value", - "documentation": {"external_uri": "external_uri_value"}, - "deployment_type": { - "enum_values": { - "values": [ - { - "id": "id_value", - "display_name": "display_name_value", - "description": "description_value", - "immutable": True, - } - ] - }, - "string_values": {"values": ["values_value1", "values_value2"]}, - "json_values": {}, - "attribute": "attribute_value", - }, - "resource_uri": "resource_uri_value", - "endpoints": ["endpoints_value1", "endpoints_value2"], - "api_versions": ["api_versions_value1", "api_versions_value2"], - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "slo": {}, - "environment": {}, - "attributes": {}, - } - # The version of a generated dependency at test 
runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = apihub_service.CreateDeploymentRequest.meta.fields["deployment"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["deployment"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request 
which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["deployment"][field])): - del request_init["deployment"][field][i][subfield] - else: - del request_init["deployment"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Deployment( - name="name_value", - display_name="display_name_value", - description="description_value", - resource_uri="resource_uri_value", - endpoints=["endpoints_value"], - api_versions=["api_versions_value"], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Deployment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.create_deployment(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Deployment) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.resource_uri == "resource_uri_value" - assert response.endpoints == ["endpoints_value"] - assert response.api_versions == ["api_versions_value"] - - -def test_create_deployment_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_deployment in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.create_deployment - ] = mock_rpc - - request = {} - client.create_deployment(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_deployment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_deployment_rest_required_fields( - request_type=apihub_service.CreateDeploymentRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_deployment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_deployment._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("deployment_id",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Deployment() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Deployment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.create_deployment(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_create_deployment_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.create_deployment._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("deploymentId",)) - & set( - ( - "parent", - "deployment", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_deployment_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, 
"transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_create_deployment" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_create_deployment" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.CreateDeploymentRequest.pb( - apihub_service.CreateDeploymentRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Deployment.to_json( - common_fields.Deployment() - ) - - request = apihub_service.CreateDeploymentRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Deployment() - - client.create_deployment( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_deployment_rest_bad_request( - transport: str = "rest", request_type=apihub_service.CreateDeploymentRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_deployment(request) - - -def test_create_deployment_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Deployment() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - deployment=common_fields.Deployment(name="name_value"), - deployment_id="deployment_id_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Deployment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.create_deployment(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/deployments" - % client.transport._host, - args[1], - ) - - -def test_create_deployment_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_deployment( - apihub_service.CreateDeploymentRequest(), - parent="parent_value", - deployment=common_fields.Deployment(name="name_value"), - deployment_id="deployment_id_value", - ) - - -def test_create_deployment_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.GetDeploymentRequest, - dict, - ], -) -def test_get_deployment_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = common_fields.Deployment( - name="name_value", - display_name="display_name_value", - description="description_value", - resource_uri="resource_uri_value", - endpoints=["endpoints_value"], - api_versions=["api_versions_value"], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Deployment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_deployment(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.Deployment) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.resource_uri == "resource_uri_value" - assert response.endpoints == ["endpoints_value"] - assert response.api_versions == ["api_versions_value"] - - -def test_get_deployment_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_deployment in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client._transport._wrapped_methods[client._transport.get_deployment] = mock_rpc - - request = {} - client.get_deployment(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_deployment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_deployment_rest_required_fields( - request_type=apihub_service.GetDeploymentRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_deployment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_deployment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Deployment() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Deployment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_deployment(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_deployment_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_deployment._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_deployment_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, 
"post_get_deployment" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_get_deployment" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.GetDeploymentRequest.pb( - apihub_service.GetDeploymentRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Deployment.to_json( - common_fields.Deployment() - ) - - request = apihub_service.GetDeploymentRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Deployment() - - client.get_deployment( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_deployment_rest_bad_request( - transport: str = "rest", request_type=apihub_service.GetDeploymentRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_deployment(request) - - -def test_get_deployment_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Deployment() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/deployments/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Deployment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_deployment(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deployments/*}" - % client.transport._host, - args[1], - ) - - -def test_get_deployment_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_deployment( - apihub_service.GetDeploymentRequest(), - name="name_value", - ) - - -def test_get_deployment_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.ListDeploymentsRequest, - dict, - ], -) -def test_list_deployments_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListDeploymentsResponse( - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListDeploymentsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_deployments(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDeploymentsPager) - assert response.next_page_token == "next_page_token_value" - - -def test_list_deployments_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_deployments in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.list_deployments - ] = mock_rpc - - request = {} - client.list_deployments(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_deployments(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_deployments_rest_required_fields( - request_type=apihub_service.ListDeploymentsRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_deployments._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_deployments._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListDeploymentsResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = apihub_service.ListDeploymentsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_deployments(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_deployments_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_deployments._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_deployments_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as 
transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_list_deployments" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_list_deployments" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.ListDeploymentsRequest.pb( - apihub_service.ListDeploymentsRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = apihub_service.ListDeploymentsResponse.to_json( - apihub_service.ListDeploymentsResponse() - ) - - request = apihub_service.ListDeploymentsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = apihub_service.ListDeploymentsResponse() - - client.list_deployments( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_deployments_rest_bad_request( - transport: str = "rest", request_type=apihub_service.ListDeploymentsRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_deployments(request) - - -def test_list_deployments_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListDeploymentsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListDeploymentsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_deployments(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/deployments" - % client.transport._host, - args[1], - ) - - -def test_list_deployments_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_deployments( - apihub_service.ListDeploymentsRequest(), - parent="parent_value", - ) - - -def test_list_deployments_rest_pager(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - apihub_service.ListDeploymentsResponse( - deployments=[ - common_fields.Deployment(), - common_fields.Deployment(), - common_fields.Deployment(), - ], - next_page_token="abc", - ), - apihub_service.ListDeploymentsResponse( - deployments=[], - next_page_token="def", - ), - apihub_service.ListDeploymentsResponse( - deployments=[ - common_fields.Deployment(), - ], - next_page_token="ghi", - ), - apihub_service.ListDeploymentsResponse( - deployments=[ - common_fields.Deployment(), - common_fields.Deployment(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - apihub_service.ListDeploymentsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = 
response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_deployments(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, common_fields.Deployment) for i in results) - - pages = list(client.list_deployments(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.UpdateDeploymentRequest, - dict, - ], -) -def test_update_deployment_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "deployment": {"name": "projects/sample1/locations/sample2/deployments/sample3"} - } - request_init["deployment"] = { - "name": "projects/sample1/locations/sample2/deployments/sample3", - "display_name": "display_name_value", - "description": "description_value", - "documentation": {"external_uri": "external_uri_value"}, - "deployment_type": { - "enum_values": { - "values": [ - { - "id": "id_value", - "display_name": "display_name_value", - "description": "description_value", - "immutable": True, - } - ] - }, - "string_values": {"values": ["values_value1", "values_value2"]}, - "json_values": {}, - "attribute": "attribute_value", - }, - "resource_uri": "resource_uri_value", - "endpoints": ["endpoints_value1", "endpoints_value2"], - "api_versions": ["api_versions_value1", "api_versions_value2"], - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "slo": {}, - "environment": {}, - "attributes": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = apihub_service.UpdateDeploymentRequest.meta.fields["deployment"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["deployment"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the 
dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["deployment"][field])): - del request_init["deployment"][field][i][subfield] - else: - del request_init["deployment"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Deployment( - name="name_value", - display_name="display_name_value", - description="description_value", - resource_uri="resource_uri_value", - endpoints=["endpoints_value"], - api_versions=["api_versions_value"], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Deployment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.update_deployment(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Deployment) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.resource_uri == "resource_uri_value" - assert response.endpoints == ["endpoints_value"] - assert response.api_versions == ["api_versions_value"] - - -def test_update_deployment_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_deployment in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.update_deployment - ] = mock_rpc - - request = {} - client.update_deployment(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.update_deployment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_deployment_rest_required_fields( - request_type=apihub_service.UpdateDeploymentRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_deployment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_deployment._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Deployment() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "patch", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Deployment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.update_deployment(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_update_deployment_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.update_deployment._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "deployment", - "updateMask", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_deployment_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_update_deployment" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_update_deployment" - ) as pre: - 
pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.UpdateDeploymentRequest.pb( - apihub_service.UpdateDeploymentRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Deployment.to_json( - common_fields.Deployment() - ) - - request = apihub_service.UpdateDeploymentRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Deployment() - - client.update_deployment( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_deployment_rest_bad_request( - transport: str = "rest", request_type=apihub_service.UpdateDeploymentRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "deployment": {"name": "projects/sample1/locations/sample2/deployments/sample3"} - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_deployment(request) - - -def test_update_deployment_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Deployment() - - # get arguments that satisfy an http rule for this method - sample_request = { - "deployment": { - "name": "projects/sample1/locations/sample2/deployments/sample3" - } - } - - # get truthy value for each flattened field - mock_args = dict( - deployment=common_fields.Deployment(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Deployment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.update_deployment(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{deployment.name=projects/*/locations/*/deployments/*}" - % client.transport._host, - args[1], - ) - - -def test_update_deployment_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_deployment( - apihub_service.UpdateDeploymentRequest(), - deployment=common_fields.Deployment(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - -def test_update_deployment_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.DeleteDeploymentRequest, - dict, - ], -) -def test_delete_deployment_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_deployment(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_deployment_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_deployment in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.delete_deployment - ] = mock_rpc - - request = {} - client.delete_deployment(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_deployment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_deployment_rest_required_fields( - request_type=apihub_service.DeleteDeploymentRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_deployment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_deployment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_deployment(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_delete_deployment_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.delete_deployment._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_deployment_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_delete_deployment" - ) as pre: - pre.assert_not_called() - pb_message = apihub_service.DeleteDeploymentRequest.pb( - apihub_service.DeleteDeploymentRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - 
req.return_value.request = PreparedRequest() - - request = apihub_service.DeleteDeploymentRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_deployment( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - - -def test_delete_deployment_rest_bad_request( - transport: str = "rest", request_type=apihub_service.DeleteDeploymentRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_deployment(request) - - -def test_delete_deployment_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/deployments/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.delete_deployment(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deployments/*}" - % client.transport._host, - args[1], - ) - - -def test_delete_deployment_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_deployment( - apihub_service.DeleteDeploymentRequest(), - name="name_value", - ) - - -def test_delete_deployment_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.CreateAttributeRequest, - dict, - ], -) -def test_create_attribute_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["attribute"] = { - "name": "name_value", - "display_name": "display_name_value", - "description": "description_value", - "definition_type": 1, - "scope": 1, - "data_type": 1, - "allowed_values": [ - { - "id": "id_value", - "display_name": "display_name_value", - "description": "description_value", - "immutable": True, - } - ], - "cardinality": 1172, - "mandatory": True, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = apihub_service.CreateAttributeRequest.meta.fields["attribute"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["attribute"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["attribute"][field])): - del request_init["attribute"][field][i][subfield] - else: - del 
request_init["attribute"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Attribute( - name="name_value", - display_name="display_name_value", - description="description_value", - definition_type=common_fields.Attribute.DefinitionType.SYSTEM_DEFINED, - scope=common_fields.Attribute.Scope.API, - data_type=common_fields.Attribute.DataType.ENUM, - cardinality=1172, - mandatory=True, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Attribute.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.create_attribute(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Attribute) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert ( - response.definition_type - == common_fields.Attribute.DefinitionType.SYSTEM_DEFINED - ) - assert response.scope == common_fields.Attribute.Scope.API - assert response.data_type == common_fields.Attribute.DataType.ENUM - assert response.cardinality == 1172 - assert response.mandatory is True - - -def test_create_attribute_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_attribute in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.create_attribute - ] = mock_rpc - - request = {} - client.create_attribute(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_attribute(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_attribute_rest_required_fields( - request_type=apihub_service.CreateAttributeRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_attribute._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_attribute._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("attribute_id",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Attribute() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Attribute.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.create_attribute(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_create_attribute_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.create_attribute._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("attributeId",)) - & set( - ( - "parent", - "attribute", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_attribute_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, 
"transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_create_attribute" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_create_attribute" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.CreateAttributeRequest.pb( - apihub_service.CreateAttributeRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Attribute.to_json( - common_fields.Attribute() - ) - - request = apihub_service.CreateAttributeRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Attribute() - - client.create_attribute( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_attribute_rest_bad_request( - transport: str = "rest", request_type=apihub_service.CreateAttributeRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_attribute(request) - - -def test_create_attribute_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Attribute() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - attribute=common_fields.Attribute(name="name_value"), - attribute_id="attribute_id_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Attribute.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.create_attribute(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/attributes" % client.transport._host, - args[1], - ) - - -def test_create_attribute_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_attribute( - apihub_service.CreateAttributeRequest(), - parent="parent_value", - attribute=common_fields.Attribute(name="name_value"), - attribute_id="attribute_id_value", - ) - - -def test_create_attribute_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.GetAttributeRequest, - dict, - ], -) -def test_get_attribute_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/attributes/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = common_fields.Attribute( - name="name_value", - display_name="display_name_value", - description="description_value", - definition_type=common_fields.Attribute.DefinitionType.SYSTEM_DEFINED, - scope=common_fields.Attribute.Scope.API, - data_type=common_fields.Attribute.DataType.ENUM, - cardinality=1172, - mandatory=True, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Attribute.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_attribute(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.Attribute) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert ( - response.definition_type - == common_fields.Attribute.DefinitionType.SYSTEM_DEFINED - ) - assert response.scope == common_fields.Attribute.Scope.API - assert response.data_type == common_fields.Attribute.DataType.ENUM - assert response.cardinality == 1172 - assert response.mandatory is True - - -def test_get_attribute_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_attribute in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - 
mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.get_attribute] = mock_rpc - - request = {} - client.get_attribute(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_attribute(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_attribute_rest_required_fields( - request_type=apihub_service.GetAttributeRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_attribute._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_attribute._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Attribute() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Attribute.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_attribute(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_attribute_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_attribute._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_attribute_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_get_attribute" - ) 
as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_get_attribute" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.GetAttributeRequest.pb( - apihub_service.GetAttributeRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Attribute.to_json( - common_fields.Attribute() - ) - - request = apihub_service.GetAttributeRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Attribute() - - client.get_attribute( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_attribute_rest_bad_request( - transport: str = "rest", request_type=apihub_service.GetAttributeRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/attributes/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_attribute(request) - - -def test_get_attribute_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Attribute() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/attributes/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Attribute.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_attribute(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/attributes/*}" % client.transport._host, - args[1], - ) - - -def test_get_attribute_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_attribute( - apihub_service.GetAttributeRequest(), - name="name_value", - ) - - -def test_get_attribute_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.UpdateAttributeRequest, - dict, - ], -) -def test_update_attribute_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "attribute": {"name": "projects/sample1/locations/sample2/attributes/sample3"} - } - request_init["attribute"] = { - "name": "projects/sample1/locations/sample2/attributes/sample3", - "display_name": "display_name_value", - "description": "description_value", - "definition_type": 1, - "scope": 1, - "data_type": 1, - "allowed_values": [ - { - "id": "id_value", - "display_name": "display_name_value", - "description": "description_value", - "immutable": True, - } - ], - "cardinality": 1172, - "mandatory": True, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = apihub_service.UpdateAttributeRequest.meta.fields["attribute"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["attribute"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["attribute"][field])): - del request_init["attribute"][field][i][subfield] - else: - del 
request_init["attribute"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Attribute( - name="name_value", - display_name="display_name_value", - description="description_value", - definition_type=common_fields.Attribute.DefinitionType.SYSTEM_DEFINED, - scope=common_fields.Attribute.Scope.API, - data_type=common_fields.Attribute.DataType.ENUM, - cardinality=1172, - mandatory=True, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Attribute.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.update_attribute(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Attribute) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert ( - response.definition_type - == common_fields.Attribute.DefinitionType.SYSTEM_DEFINED - ) - assert response.scope == common_fields.Attribute.Scope.API - assert response.data_type == common_fields.Attribute.DataType.ENUM - assert response.cardinality == 1172 - assert response.mandatory is True - - -def test_update_attribute_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_attribute in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.update_attribute - ] = mock_rpc - - request = {} - client.update_attribute(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.update_attribute(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_attribute_rest_required_fields( - request_type=apihub_service.UpdateAttributeRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_attribute._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_attribute._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Attribute() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "patch", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Attribute.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.update_attribute(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_update_attribute_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.update_attribute._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "attribute", - "updateMask", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_attribute_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_update_attribute" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_update_attribute" - ) as pre: - 
pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.UpdateAttributeRequest.pb( - apihub_service.UpdateAttributeRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Attribute.to_json( - common_fields.Attribute() - ) - - request = apihub_service.UpdateAttributeRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Attribute() - - client.update_attribute( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_attribute_rest_bad_request( - transport: str = "rest", request_type=apihub_service.UpdateAttributeRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "attribute": {"name": "projects/sample1/locations/sample2/attributes/sample3"} - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_attribute(request) - - -def test_update_attribute_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Attribute() - - # get arguments that satisfy an http rule for this method - sample_request = { - "attribute": { - "name": "projects/sample1/locations/sample2/attributes/sample3" - } - } - - # get truthy value for each flattened field - mock_args = dict( - attribute=common_fields.Attribute(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Attribute.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.update_attribute(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{attribute.name=projects/*/locations/*/attributes/*}" - % client.transport._host, - args[1], - ) - - -def test_update_attribute_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_attribute( - apihub_service.UpdateAttributeRequest(), - attribute=common_fields.Attribute(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - -def test_update_attribute_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.DeleteAttributeRequest, - dict, - ], -) -def test_delete_attribute_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/attributes/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_attribute(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_attribute_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_attribute in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.delete_attribute - ] = mock_rpc - - request = {} - client.delete_attribute(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_attribute(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_attribute_rest_required_fields( - request_type=apihub_service.DeleteAttributeRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_attribute._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_attribute._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_attribute(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_delete_attribute_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.delete_attribute._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_attribute_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_delete_attribute" - ) as pre: - pre.assert_not_called() - pb_message = apihub_service.DeleteAttributeRequest.pb( - apihub_service.DeleteAttributeRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - 
req.return_value.request = PreparedRequest() - - request = apihub_service.DeleteAttributeRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_attribute( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - - -def test_delete_attribute_rest_bad_request( - transport: str = "rest", request_type=apihub_service.DeleteAttributeRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/attributes/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_attribute(request) - - -def test_delete_attribute_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/attributes/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.delete_attribute(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/attributes/*}" % client.transport._host, - args[1], - ) - - -def test_delete_attribute_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_attribute( - apihub_service.DeleteAttributeRequest(), - name="name_value", - ) - - -def test_delete_attribute_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.ListAttributesRequest, - dict, - ], -) -def test_list_attributes_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListAttributesResponse( - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListAttributesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_attributes(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAttributesPager) - assert response.next_page_token == "next_page_token_value" - - -def test_list_attributes_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_attributes in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.list_attributes] = mock_rpc - - request = {} - client.list_attributes(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_attributes(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_attributes_rest_required_fields( - request_type=apihub_service.ListAttributesRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_attributes._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_attributes._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListAttributesResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = apihub_service.ListAttributesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_attributes(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_attributes_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_attributes._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_attributes_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as 
transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_list_attributes" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_list_attributes" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.ListAttributesRequest.pb( - apihub_service.ListAttributesRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = apihub_service.ListAttributesResponse.to_json( - apihub_service.ListAttributesResponse() - ) - - request = apihub_service.ListAttributesRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = apihub_service.ListAttributesResponse() - - client.list_attributes( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_attributes_rest_bad_request( - transport: str = "rest", request_type=apihub_service.ListAttributesRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_attributes(request) - - -def test_list_attributes_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListAttributesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListAttributesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_attributes(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/attributes" % client.transport._host, - args[1], - ) - - -def test_list_attributes_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_attributes( - apihub_service.ListAttributesRequest(), - parent="parent_value", - ) - - -def test_list_attributes_rest_pager(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - apihub_service.ListAttributesResponse( - attributes=[ - common_fields.Attribute(), - common_fields.Attribute(), - common_fields.Attribute(), - ], - next_page_token="abc", - ), - apihub_service.ListAttributesResponse( - attributes=[], - next_page_token="def", - ), - apihub_service.ListAttributesResponse( - attributes=[ - common_fields.Attribute(), - ], - next_page_token="ghi", - ), - apihub_service.ListAttributesResponse( - attributes=[ - common_fields.Attribute(), - common_fields.Attribute(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - apihub_service.ListAttributesResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_attributes(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, common_fields.Attribute) for i in results) - - pages = list(client.list_attributes(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert 
page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.SearchResourcesRequest, - dict, - ], -) -def test_search_resources_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"location": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.SearchResourcesResponse( - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.SearchResourcesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.search_resources(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.SearchResourcesPager) - assert response.next_page_token == "next_page_token_value" - - -def test_search_resources_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.search_resources in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.search_resources - ] = mock_rpc - - request = {} - client.search_resources(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.search_resources(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_search_resources_rest_required_fields( - request_type=apihub_service.SearchResourcesRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["location"] = "" - request_init["query"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).search_resources._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["location"] = "location_value" - jsonified_request["query"] = "query_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).search_resources._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "location" in jsonified_request - assert jsonified_request["location"] == "location_value" - assert "query" in jsonified_request - assert jsonified_request["query"] == "query_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = apihub_service.SearchResourcesResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = apihub_service.SearchResourcesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.search_resources(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_search_resources_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.search_resources._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "location", - "query", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_search_resources_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, 
"transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_search_resources" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_search_resources" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.SearchResourcesRequest.pb( - apihub_service.SearchResourcesRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = apihub_service.SearchResourcesResponse.to_json( - apihub_service.SearchResourcesResponse() - ) - - request = apihub_service.SearchResourcesRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = apihub_service.SearchResourcesResponse() - - client.search_resources( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_search_resources_rest_bad_request( - transport: str = "rest", request_type=apihub_service.SearchResourcesRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"location": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.search_resources(request) - - -def test_search_resources_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.SearchResourcesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"location": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - location="location_value", - query="query_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.SearchResourcesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.search_resources(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{location=projects/*/locations/*}:searchResources" - % client.transport._host, - args[1], - ) - - -def test_search_resources_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.search_resources( - apihub_service.SearchResourcesRequest(), - location="location_value", - query="query_value", - ) - - -def test_search_resources_rest_pager(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - apihub_service.SearchResourcesResponse( - search_results=[ - apihub_service.SearchResult(), - apihub_service.SearchResult(), - apihub_service.SearchResult(), - ], - next_page_token="abc", - ), - apihub_service.SearchResourcesResponse( - search_results=[], - next_page_token="def", - ), - apihub_service.SearchResourcesResponse( - search_results=[ - apihub_service.SearchResult(), - ], - next_page_token="ghi", - ), - apihub_service.SearchResourcesResponse( - search_results=[ - apihub_service.SearchResult(), - apihub_service.SearchResult(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - apihub_service.SearchResourcesResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"location": "projects/sample1/locations/sample2"} - - pager = client.search_resources(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, apihub_service.SearchResult) for i in results) - - pages = list(client.search_resources(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.CreateExternalApiRequest, - dict, - ], -) -def test_create_external_api_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["external_api"] = { - "name": 
"name_value", - "display_name": "display_name_value", - "description": "description_value", - "endpoints": ["endpoints_value1", "endpoints_value2"], - "paths": ["paths_value1", "paths_value2"], - "documentation": {"external_uri": "external_uri_value"}, - "attributes": {}, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = apihub_service.CreateExternalApiRequest.meta.fields["external_api"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["external_api"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - 
result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["external_api"][field])): - del request_init["external_api"][field][i][subfield] - else: - del request_init["external_api"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.ExternalApi( - name="name_value", - display_name="display_name_value", - description="description_value", - endpoints=["endpoints_value"], - paths=["paths_value"], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.ExternalApi.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.create_external_api(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.ExternalApi) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.endpoints == ["endpoints_value"] - assert response.paths == ["paths_value"] - - -def test_create_external_api_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.create_external_api in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.create_external_api - ] = mock_rpc - - request = {} - client.create_external_api(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_external_api(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_external_api_rest_required_fields( - request_type=apihub_service.CreateExternalApiRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_external_api._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_external_api._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("external_api_id",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.ExternalApi() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.ExternalApi.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.create_external_api(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_create_external_api_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.create_external_api._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("externalApiId",)) - & set( - ( - "parent", - "externalApi", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_external_api_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - 
path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_create_external_api" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_create_external_api" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.CreateExternalApiRequest.pb( - apihub_service.CreateExternalApiRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.ExternalApi.to_json( - common_fields.ExternalApi() - ) - - request = apihub_service.CreateExternalApiRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.ExternalApi() - - client.create_external_api( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_external_api_rest_bad_request( - transport: str = "rest", request_type=apihub_service.CreateExternalApiRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_external_api(request) - - -def test_create_external_api_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.ExternalApi() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - external_api=common_fields.ExternalApi(name="name_value"), - external_api_id="external_api_id_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.ExternalApi.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.create_external_api(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/externalApis" - % client.transport._host, - args[1], - ) - - -def test_create_external_api_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_external_api( - apihub_service.CreateExternalApiRequest(), - parent="parent_value", - external_api=common_fields.ExternalApi(name="name_value"), - external_api_id="external_api_id_value", - ) - - -def test_create_external_api_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.GetExternalApiRequest, - dict, - ], -) -def test_get_external_api_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/externalApis/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = common_fields.ExternalApi( - name="name_value", - display_name="display_name_value", - description="description_value", - endpoints=["endpoints_value"], - paths=["paths_value"], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.ExternalApi.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_external_api(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.ExternalApi) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.endpoints == ["endpoints_value"] - assert response.paths == ["paths_value"] - - -def test_get_external_api_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_external_api in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.get_external_api - ] = mock_rpc - - request = {} - client.get_external_api(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_external_api(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_external_api_rest_required_fields( - request_type=apihub_service.GetExternalApiRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_external_api._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_external_api._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.ExternalApi() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.ExternalApi.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_external_api(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_external_api_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_external_api._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_external_api_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_get_external_api" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_get_external_api" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.GetExternalApiRequest.pb( - 
apihub_service.GetExternalApiRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.ExternalApi.to_json( - common_fields.ExternalApi() - ) - - request = apihub_service.GetExternalApiRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.ExternalApi() - - client.get_external_api( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_external_api_rest_bad_request( - transport: str = "rest", request_type=apihub_service.GetExternalApiRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/externalApis/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_external_api(request) - - -def test_get_external_api_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = common_fields.ExternalApi() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/externalApis/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.ExternalApi.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_external_api(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/externalApis/*}" - % client.transport._host, - args[1], - ) - - -def test_get_external_api_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_external_api( - apihub_service.GetExternalApiRequest(), - name="name_value", - ) - - -def test_get_external_api_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.UpdateExternalApiRequest, - dict, - ], -) -def test_update_external_api_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "external_api": { - "name": "projects/sample1/locations/sample2/externalApis/sample3" - } - } - request_init["external_api"] = { - "name": "projects/sample1/locations/sample2/externalApis/sample3", - "display_name": "display_name_value", - "description": "description_value", - "endpoints": ["endpoints_value1", "endpoints_value2"], - "paths": ["paths_value1", "paths_value2"], - "documentation": {"external_uri": "external_uri_value"}, - "attributes": {}, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = apihub_service.UpdateExternalApiRequest.meta.fields["external_api"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["external_api"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["external_api"][field])): - del request_init["external_api"][field][i][subfield] - else: - del 
request_init["external_api"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.ExternalApi( - name="name_value", - display_name="display_name_value", - description="description_value", - endpoints=["endpoints_value"], - paths=["paths_value"], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.ExternalApi.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.update_external_api(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.ExternalApi) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.endpoints == ["endpoints_value"] - assert response.paths == ["paths_value"] - - -def test_update_external_api_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.update_external_api in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - 
"foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.update_external_api - ] = mock_rpc - - request = {} - client.update_external_api(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_external_api(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_external_api_rest_required_fields( - request_type=apihub_service.UpdateExternalApiRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_external_api._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_external_api._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.ExternalApi() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "patch", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.ExternalApi.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.update_external_api(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_update_external_api_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.update_external_api._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "externalApi", - "updateMask", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_external_api_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - 
path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_update_external_api" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_update_external_api" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.UpdateExternalApiRequest.pb( - apihub_service.UpdateExternalApiRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.ExternalApi.to_json( - common_fields.ExternalApi() - ) - - request = apihub_service.UpdateExternalApiRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.ExternalApi() - - client.update_external_api( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_external_api_rest_bad_request( - transport: str = "rest", request_type=apihub_service.UpdateExternalApiRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "external_api": { - "name": "projects/sample1/locations/sample2/externalApis/sample3" - } - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_external_api(request) - - -def test_update_external_api_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.ExternalApi() - - # get arguments that satisfy an http rule for this method - sample_request = { - "external_api": { - "name": "projects/sample1/locations/sample2/externalApis/sample3" - } - } - - # get truthy value for each flattened field - mock_args = dict( - external_api=common_fields.ExternalApi(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.ExternalApi.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.update_external_api(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{external_api.name=projects/*/locations/*/externalApis/*}" - % client.transport._host, - args[1], - ) - - -def test_update_external_api_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_external_api( - apihub_service.UpdateExternalApiRequest(), - external_api=common_fields.ExternalApi(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - -def test_update_external_api_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.DeleteExternalApiRequest, - dict, - ], -) -def test_delete_external_api_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/externalApis/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_external_api(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_external_api_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.delete_external_api in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.delete_external_api - ] = mock_rpc - - request = {} - client.delete_external_api(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_external_api(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_external_api_rest_required_fields( - request_type=apihub_service.DeleteExternalApiRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_external_api._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_external_api._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_external_api(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_delete_external_api_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.delete_external_api._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_external_api_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_delete_external_api" - ) as pre: - pre.assert_not_called() - pb_message = apihub_service.DeleteExternalApiRequest.pb( - apihub_service.DeleteExternalApiRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - 
req.return_value.request = PreparedRequest() - - request = apihub_service.DeleteExternalApiRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_external_api( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - - -def test_delete_external_api_rest_bad_request( - transport: str = "rest", request_type=apihub_service.DeleteExternalApiRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/externalApis/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_external_api(request) - - -def test_delete_external_api_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/externalApis/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.delete_external_api(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/externalApis/*}" - % client.transport._host, - args[1], - ) - - -def test_delete_external_api_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_external_api( - apihub_service.DeleteExternalApiRequest(), - name="name_value", - ) - - -def test_delete_external_api_rest_error(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.ListExternalApisRequest, - dict, - ], -) -def test_list_external_apis_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListExternalApisResponse( - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListExternalApisResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_external_apis(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListExternalApisPager) - assert response.next_page_token == "next_page_token_value" - - -def test_list_external_apis_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.list_external_apis in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.list_external_apis - ] = mock_rpc - - request = {} - client.list_external_apis(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_external_apis(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_external_apis_rest_required_fields( - request_type=apihub_service.ListExternalApisRequest, -): - transport_class = transports.ApiHubRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_external_apis._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_external_apis._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListExternalApisResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = apihub_service.ListExternalApisResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_external_apis(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_external_apis_rest_unset_required_fields(): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_external_apis._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_external_apis_rest_interceptors(null_interceptor): - transport = transports.ApiHubRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ApiHubRestInterceptor(), - ) - client = ApiHubClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as 
transcode, mock.patch.object( - transports.ApiHubRestInterceptor, "post_list_external_apis" - ) as post, mock.patch.object( - transports.ApiHubRestInterceptor, "pre_list_external_apis" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.ListExternalApisRequest.pb( - apihub_service.ListExternalApisRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = apihub_service.ListExternalApisResponse.to_json( - apihub_service.ListExternalApisResponse() - ) - - request = apihub_service.ListExternalApisRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = apihub_service.ListExternalApisResponse() - - client.list_external_apis( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_external_apis_rest_bad_request( - transport: str = "rest", request_type=apihub_service.ListExternalApisRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_external_apis(request) - - -def test_list_external_apis_rest_flattened(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListExternalApisResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListExternalApisResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_external_apis(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/externalApis" - % client.transport._host, - args[1], - ) - - -def test_list_external_apis_rest_flattened_error(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_external_apis( - apihub_service.ListExternalApisRequest(), - parent="parent_value", - ) - - -def test_list_external_apis_rest_pager(transport: str = "rest"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - apihub_service.ListExternalApisResponse( - external_apis=[ - common_fields.ExternalApi(), - common_fields.ExternalApi(), - common_fields.ExternalApi(), - ], - next_page_token="abc", - ), - apihub_service.ListExternalApisResponse( - external_apis=[], - next_page_token="def", - ), - apihub_service.ListExternalApisResponse( - external_apis=[ - common_fields.ExternalApi(), - ], - next_page_token="ghi", - ), - apihub_service.ListExternalApisResponse( - external_apis=[ - common_fields.ExternalApi(), - common_fields.ExternalApi(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - apihub_service.ListExternalApisResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - 
return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_external_apis(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, common_fields.ExternalApi) for i in results) - - pages = list(client.list_external_apis(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.ApiHubGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.ApiHubGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ApiHubClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.ApiHubGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ApiHubClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ApiHubClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. 
- transport = transports.ApiHubGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ApiHubClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.ApiHubGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = ApiHubClient(transport=transport) - assert client.transport is transport - - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.ApiHubGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.ApiHubGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ApiHubGrpcTransport, - transports.ApiHubGrpcAsyncIOTransport, - transports.ApiHubRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "rest", - ], -) -def test_transport_kind(transport_name): - transport = ApiHubClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. 
- client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.ApiHubGrpcTransport, - ) - - -def test_api_hub_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.ApiHubTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", - ) - - -def test_api_hub_base_transport(): - # Instantiate the base transport. - with mock.patch( - "google.cloud.apihub_v1.services.api_hub.transports.ApiHubTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.ApiHubTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - "create_api", - "get_api", - "list_apis", - "update_api", - "delete_api", - "create_version", - "get_version", - "list_versions", - "update_version", - "delete_version", - "create_spec", - "get_spec", - "get_spec_contents", - "list_specs", - "update_spec", - "delete_spec", - "get_api_operation", - "list_api_operations", - "get_definition", - "create_deployment", - "get_deployment", - "list_deployments", - "update_deployment", - "delete_deployment", - "create_attribute", - "get_attribute", - "update_attribute", - "delete_attribute", - "list_attributes", - "search_resources", - "create_external_api", - "get_external_api", - "update_external_api", - "delete_external_api", - "list_external_apis", - "get_location", - "list_locations", - "get_operation", - "cancel_operation", - "delete_operation", - "list_operations", - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 
"kind", - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_api_hub_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.apihub_v1.services.api_hub.transports.ApiHubTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ApiHubTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -def test_api_hub_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.cloud.apihub_v1.services.api_hub.transports.ApiHubTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ApiHubTransport() - adc.assert_called_once() - - -def test_api_hub_auth_adc(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - ApiHubClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ApiHubGrpcTransport, - transports.ApiHubGrpcAsyncIOTransport, - ], -) -def test_api_hub_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ApiHubGrpcTransport, - transports.ApiHubGrpcAsyncIOTransport, - transports.ApiHubRestTransport, - ], -) -def test_api_hub_transport_auth_gdch_credentials(transport_class): - host = "https://language.com" - api_audience_tests = [None, "https://language2.com"] - api_audience_expect = [host, "https://language2.com"] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, "default", autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock( - return_value=gdch_mock - ) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with(e) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.ApiHubGrpcTransport, grpc_helpers), - (transports.ApiHubGrpcAsyncIOTransport, grpc_helpers_async), - ], -) -def test_api_hub_transport_create_channel(transport_class, grpc_helpers): - # 
If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - - create_channel.assert_called_with( - "apihub.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=["1", "2"], - default_host="apihub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize( - "transport_class", - [transports.ApiHubGrpcTransport, transports.ApiHubGrpcAsyncIOTransport], -) -def test_api_hub_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds, - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback, - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, private_key=expected_key - ) - - -def test_api_hub_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch( - "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" - ) as mock_configure_mtls_channel: - transports.ApiHubRestTransport( - credentials=cred, client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_api_hub_host_no_port(transport_name): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="apihub.googleapis.com" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "apihub.googleapis.com:443" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://apihub.googleapis.com" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_api_hub_host_with_port(transport_name): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="apihub.googleapis.com:8000" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "apihub.googleapis.com:8000" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://apihub.googleapis.com:8000" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "rest", - ], -) -def 
test_api_hub_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = ApiHubClient( - credentials=creds1, - transport=transport_name, - ) - client2 = ApiHubClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.create_api._session - session2 = client2.transport.create_api._session - assert session1 != session2 - session1 = client1.transport.get_api._session - session2 = client2.transport.get_api._session - assert session1 != session2 - session1 = client1.transport.list_apis._session - session2 = client2.transport.list_apis._session - assert session1 != session2 - session1 = client1.transport.update_api._session - session2 = client2.transport.update_api._session - assert session1 != session2 - session1 = client1.transport.delete_api._session - session2 = client2.transport.delete_api._session - assert session1 != session2 - session1 = client1.transport.create_version._session - session2 = client2.transport.create_version._session - assert session1 != session2 - session1 = client1.transport.get_version._session - session2 = client2.transport.get_version._session - assert session1 != session2 - session1 = client1.transport.list_versions._session - session2 = client2.transport.list_versions._session - assert session1 != session2 - session1 = client1.transport.update_version._session - session2 = client2.transport.update_version._session - assert session1 != session2 - session1 = client1.transport.delete_version._session - session2 = client2.transport.delete_version._session - assert session1 != session2 - session1 = client1.transport.create_spec._session - session2 = client2.transport.create_spec._session - assert session1 != session2 - session1 = client1.transport.get_spec._session - session2 = client2.transport.get_spec._session - assert session1 != session2 - session1 = client1.transport.get_spec_contents._session - session2 = 
client2.transport.get_spec_contents._session - assert session1 != session2 - session1 = client1.transport.list_specs._session - session2 = client2.transport.list_specs._session - assert session1 != session2 - session1 = client1.transport.update_spec._session - session2 = client2.transport.update_spec._session - assert session1 != session2 - session1 = client1.transport.delete_spec._session - session2 = client2.transport.delete_spec._session - assert session1 != session2 - session1 = client1.transport.get_api_operation._session - session2 = client2.transport.get_api_operation._session - assert session1 != session2 - session1 = client1.transport.list_api_operations._session - session2 = client2.transport.list_api_operations._session - assert session1 != session2 - session1 = client1.transport.get_definition._session - session2 = client2.transport.get_definition._session - assert session1 != session2 - session1 = client1.transport.create_deployment._session - session2 = client2.transport.create_deployment._session - assert session1 != session2 - session1 = client1.transport.get_deployment._session - session2 = client2.transport.get_deployment._session - assert session1 != session2 - session1 = client1.transport.list_deployments._session - session2 = client2.transport.list_deployments._session - assert session1 != session2 - session1 = client1.transport.update_deployment._session - session2 = client2.transport.update_deployment._session - assert session1 != session2 - session1 = client1.transport.delete_deployment._session - session2 = client2.transport.delete_deployment._session - assert session1 != session2 - session1 = client1.transport.create_attribute._session - session2 = client2.transport.create_attribute._session - assert session1 != session2 - session1 = client1.transport.get_attribute._session - session2 = client2.transport.get_attribute._session - assert session1 != session2 - session1 = client1.transport.update_attribute._session - session2 = 
client2.transport.update_attribute._session - assert session1 != session2 - session1 = client1.transport.delete_attribute._session - session2 = client2.transport.delete_attribute._session - assert session1 != session2 - session1 = client1.transport.list_attributes._session - session2 = client2.transport.list_attributes._session - assert session1 != session2 - session1 = client1.transport.search_resources._session - session2 = client2.transport.search_resources._session - assert session1 != session2 - session1 = client1.transport.create_external_api._session - session2 = client2.transport.create_external_api._session - assert session1 != session2 - session1 = client1.transport.get_external_api._session - session2 = client2.transport.get_external_api._session - assert session1 != session2 - session1 = client1.transport.update_external_api._session - session2 = client2.transport.update_external_api._session - assert session1 != session2 - session1 = client1.transport.delete_external_api._session - session2 = client2.transport.delete_external_api._session - assert session1 != session2 - session1 = client1.transport.list_external_apis._session - session2 = client2.transport.list_external_apis._session - assert session1 != session2 - - -def test_api_hub_grpc_transport_channel(): - channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.ApiHubGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_api_hub_grpc_asyncio_transport_channel(): - channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.ApiHubGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize( - "transport_class", - [transports.ApiHubGrpcTransport, transports.ApiHubGrpcAsyncIOTransport], -) -def test_api_hub_transport_channel_mtls_with_client_cert_source(transport_class): - with mock.patch( - "grpc.ssl_channel_credentials", autospec=True - ) as grpc_ssl_channel_cred: - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio 
transport constructor. -@pytest.mark.parametrize( - "transport_class", - [transports.ApiHubGrpcTransport, transports.ApiHubGrpcAsyncIOTransport], -) -def test_api_hub_transport_channel_mtls_with_adc(transport_class): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_api_path(): - project = "squid" - location = "clam" - api = "whelk" - expected = "projects/{project}/locations/{location}/apis/{api}".format( - project=project, - location=location, - api=api, - ) - actual = ApiHubClient.api_path(project, location, api) - assert expected == actual - - -def test_parse_api_path(): - expected = { - "project": "octopus", - "location": "oyster", - "api": "nudibranch", - } - path = ApiHubClient.api_path(**expected) - - # Check that the path construction is reversible. 
- actual = ApiHubClient.parse_api_path(path) - assert expected == actual - - -def test_api_operation_path(): - project = "cuttlefish" - location = "mussel" - api = "winkle" - version = "nautilus" - operation = "scallop" - expected = "projects/{project}/locations/{location}/apis/{api}/versions/{version}/operations/{operation}".format( - project=project, - location=location, - api=api, - version=version, - operation=operation, - ) - actual = ApiHubClient.api_operation_path(project, location, api, version, operation) - assert expected == actual - - -def test_parse_api_operation_path(): - expected = { - "project": "abalone", - "location": "squid", - "api": "clam", - "version": "whelk", - "operation": "octopus", - } - path = ApiHubClient.api_operation_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubClient.parse_api_operation_path(path) - assert expected == actual - - -def test_attribute_path(): - project = "oyster" - location = "nudibranch" - attribute = "cuttlefish" - expected = "projects/{project}/locations/{location}/attributes/{attribute}".format( - project=project, - location=location, - attribute=attribute, - ) - actual = ApiHubClient.attribute_path(project, location, attribute) - assert expected == actual - - -def test_parse_attribute_path(): - expected = { - "project": "mussel", - "location": "winkle", - "attribute": "nautilus", - } - path = ApiHubClient.attribute_path(**expected) - - # Check that the path construction is reversible. 
- actual = ApiHubClient.parse_attribute_path(path) - assert expected == actual - - -def test_definition_path(): - project = "scallop" - location = "abalone" - api = "squid" - version = "clam" - definition = "whelk" - expected = "projects/{project}/locations/{location}/apis/{api}/versions/{version}/definitions/{definition}".format( - project=project, - location=location, - api=api, - version=version, - definition=definition, - ) - actual = ApiHubClient.definition_path(project, location, api, version, definition) - assert expected == actual - - -def test_parse_definition_path(): - expected = { - "project": "octopus", - "location": "oyster", - "api": "nudibranch", - "version": "cuttlefish", - "definition": "mussel", - } - path = ApiHubClient.definition_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubClient.parse_definition_path(path) - assert expected == actual - - -def test_deployment_path(): - project = "winkle" - location = "nautilus" - deployment = "scallop" - expected = ( - "projects/{project}/locations/{location}/deployments/{deployment}".format( - project=project, - location=location, - deployment=deployment, - ) - ) - actual = ApiHubClient.deployment_path(project, location, deployment) - assert expected == actual - - -def test_parse_deployment_path(): - expected = { - "project": "abalone", - "location": "squid", - "deployment": "clam", - } - path = ApiHubClient.deployment_path(**expected) - - # Check that the path construction is reversible. 
- actual = ApiHubClient.parse_deployment_path(path) - assert expected == actual - - -def test_external_api_path(): - project = "whelk" - location = "octopus" - external_api = "oyster" - expected = ( - "projects/{project}/locations/{location}/externalApis/{external_api}".format( - project=project, - location=location, - external_api=external_api, - ) - ) - actual = ApiHubClient.external_api_path(project, location, external_api) - assert expected == actual - - -def test_parse_external_api_path(): - expected = { - "project": "nudibranch", - "location": "cuttlefish", - "external_api": "mussel", - } - path = ApiHubClient.external_api_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubClient.parse_external_api_path(path) - assert expected == actual - - -def test_spec_path(): - project = "winkle" - location = "nautilus" - api = "scallop" - version = "abalone" - spec = "squid" - expected = "projects/{project}/locations/{location}/apis/{api}/versions/{version}/specs/{spec}".format( - project=project, - location=location, - api=api, - version=version, - spec=spec, - ) - actual = ApiHubClient.spec_path(project, location, api, version, spec) - assert expected == actual - - -def test_parse_spec_path(): - expected = { - "project": "clam", - "location": "whelk", - "api": "octopus", - "version": "oyster", - "spec": "nudibranch", - } - path = ApiHubClient.spec_path(**expected) - - # Check that the path construction is reversible. 
- actual = ApiHubClient.parse_spec_path(path) - assert expected == actual - - -def test_version_path(): - project = "cuttlefish" - location = "mussel" - api = "winkle" - version = "nautilus" - expected = ( - "projects/{project}/locations/{location}/apis/{api}/versions/{version}".format( - project=project, - location=location, - api=api, - version=version, - ) - ) - actual = ApiHubClient.version_path(project, location, api, version) - assert expected == actual - - -def test_parse_version_path(): - expected = { - "project": "scallop", - "location": "abalone", - "api": "squid", - "version": "clam", - } - path = ApiHubClient.version_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubClient.parse_version_path(path) - assert expected == actual - - -def test_common_billing_account_path(): - billing_account = "whelk" - expected = "billingAccounts/{billing_account}".format( - billing_account=billing_account, - ) - actual = ApiHubClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "octopus", - } - path = ApiHubClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubClient.parse_common_billing_account_path(path) - assert expected == actual - - -def test_common_folder_path(): - folder = "oyster" - expected = "folders/{folder}".format( - folder=folder, - ) - actual = ApiHubClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nudibranch", - } - path = ApiHubClient.common_folder_path(**expected) - - # Check that the path construction is reversible. 
- actual = ApiHubClient.parse_common_folder_path(path) - assert expected == actual - - -def test_common_organization_path(): - organization = "cuttlefish" - expected = "organizations/{organization}".format( - organization=organization, - ) - actual = ApiHubClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "mussel", - } - path = ApiHubClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubClient.parse_common_organization_path(path) - assert expected == actual - - -def test_common_project_path(): - project = "winkle" - expected = "projects/{project}".format( - project=project, - ) - actual = ApiHubClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "nautilus", - } - path = ApiHubClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubClient.parse_common_project_path(path) - assert expected == actual - - -def test_common_location_path(): - project = "scallop" - location = "abalone" - expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, - ) - actual = ApiHubClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "squid", - "location": "clam", - } - path = ApiHubClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = ApiHubClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object( - transports.ApiHubTransport, "_prep_wrapped_messages" - ) as prep: - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object( - transports.ApiHubTransport, "_prep_wrapped_messages" - ) as prep: - transport_class = ApiHubClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_get_location_rest_bad_request( - transport: str = "rest", request_type=locations_pb2.GetLocationRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_location(request) + client = ApiHubClient(transport=transport) + assert client.transport is transport @pytest.mark.parametrize( - "request_type", + "transport_class", [ - locations_pb2.GetLocationRequest, - dict, + transports.ApiHubRestTransport, ], ) -def test_get_location_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.Location() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_location(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -def test_list_locations_rest_bad_request( - transport: str = "rest", request_type=locations_pb2.ListLocationsRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({"name": "projects/sample1"}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_locations(request) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() @pytest.mark.parametrize( - "request_type", + "transport_name", [ - locations_pb2.ListLocationsRequest, - dict, + "rest", ], ) -def test_list_locations_rest(request_type): - client = ApiHubClient( +def test_transport_kind(transport_name): + transport = ApiHubClient.get_transport_class(transport_name)( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request_init = {"name": "projects/sample1"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.ListLocationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + assert transport.kind == transport_name - response = client.list_locations(request) - # Establish that the response is the type that we expect. 
- assert isinstance(response, locations_pb2.ListLocationsResponse) +def test_api_hub_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ApiHubTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) -def test_cancel_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.CancelOperationRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) +def test_api_hub_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.apihub_v1.services.api_hub.transports.ApiHubTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ApiHubTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "create_api", + "get_api", + "list_apis", + "update_api", + "delete_api", + "create_version", + "get_version", + "list_versions", + "update_version", + "delete_version", + "create_spec", + "get_spec", + "get_spec_contents", + "list_specs", + "update_spec", + "delete_spec", + "get_api_operation", + "list_api_operations", + "get_definition", + "create_deployment", + "get_deployment", + "list_deployments", + "update_deployment", + "delete_deployment", + "create_attribute", + "get_attribute", + "update_attribute", + "delete_attribute", + "list_attributes", + "search_resources", + "create_external_api", + "get_external_api", + "update_external_api", + "delete_external_api", + "list_external_apis", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_operation(request) - + with pytest.raises(NotImplementedError): + transport.close() -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.CancelOperationRequest, - dict, - ], -) -def test_cancel_operation_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value +def test_api_hub_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.apihub_v1.services.api_hub.transports.ApiHubTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ApiHubTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) - response = client.cancel_operation(request) - # Establish that the response is the type that we expect. - assert response is None +def test_api_hub_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.apihub_v1.services.api_hub.transports.ApiHubTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ApiHubTransport() + adc.assert_called_once() -def test_delete_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest -): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) +def test_api_hub_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ApiHubClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_operation(request) +def test_api_hub_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.ApiHubRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) @pytest.mark.parametrize( - "request_type", + "transport_name", [ - operations_pb2.DeleteOperationRequest, - dict, + "rest", ], ) -def test_delete_operation_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_get_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.GetOperationRequest -): +def test_api_hub_host_no_port(transport_name): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com" + ), + transport=transport_name, ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + assert client.transport._host == ( + "apihub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com" ) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) - @pytest.mark.parametrize( - "request_type", + "transport_name", [ - operations_pb2.GetOperationRequest, - dict, + "rest", ], ) -def test_get_operation_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_list_operations_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.ListOperationsRequest -): +def test_api_hub_host_with_port(transport_name): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com:8000" + ), + transport=transport_name, ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request + assert client.transport._host == ( + "apihub.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com:8000" ) - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_operations(request) - @pytest.mark.parametrize( - "request_type", + "transport_name", [ - operations_pb2.ListOperationsRequest, - dict, + "rest", ], ) -def test_list_operations_rest(request_type): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", +def test_api_hub_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ApiHubClient( + credentials=creds1, + transport=transport_name, ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_operations(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.ListOperationsResponse) + client2 = ApiHubClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_api._session + session2 = client2.transport.create_api._session + assert session1 != session2 + session1 = client1.transport.get_api._session + session2 = client2.transport.get_api._session + assert session1 != session2 + session1 = client1.transport.list_apis._session + session2 = client2.transport.list_apis._session + assert session1 != session2 + session1 = client1.transport.update_api._session + session2 = client2.transport.update_api._session + assert session1 != session2 + session1 = client1.transport.delete_api._session + session2 = client2.transport.delete_api._session + assert session1 != session2 + session1 = client1.transport.create_version._session + session2 = client2.transport.create_version._session + assert session1 != session2 + session1 = client1.transport.get_version._session + session2 = client2.transport.get_version._session + assert session1 != session2 + session1 = client1.transport.list_versions._session + session2 = client2.transport.list_versions._session + assert session1 != session2 + session1 = client1.transport.update_version._session + session2 = client2.transport.update_version._session + assert session1 != session2 + session1 = client1.transport.delete_version._session + session2 = client2.transport.delete_version._session + assert session1 != session2 + session1 = client1.transport.create_spec._session + session2 = client2.transport.create_spec._session + assert session1 != session2 + session1 = client1.transport.get_spec._session + session2 = client2.transport.get_spec._session + assert session1 != session2 + session1 = client1.transport.get_spec_contents._session + session2 = client2.transport.get_spec_contents._session + assert session1 != session2 + session1 = client1.transport.list_specs._session + session2 = 
client2.transport.list_specs._session + assert session1 != session2 + session1 = client1.transport.update_spec._session + session2 = client2.transport.update_spec._session + assert session1 != session2 + session1 = client1.transport.delete_spec._session + session2 = client2.transport.delete_spec._session + assert session1 != session2 + session1 = client1.transport.get_api_operation._session + session2 = client2.transport.get_api_operation._session + assert session1 != session2 + session1 = client1.transport.list_api_operations._session + session2 = client2.transport.list_api_operations._session + assert session1 != session2 + session1 = client1.transport.get_definition._session + session2 = client2.transport.get_definition._session + assert session1 != session2 + session1 = client1.transport.create_deployment._session + session2 = client2.transport.create_deployment._session + assert session1 != session2 + session1 = client1.transport.get_deployment._session + session2 = client2.transport.get_deployment._session + assert session1 != session2 + session1 = client1.transport.list_deployments._session + session2 = client2.transport.list_deployments._session + assert session1 != session2 + session1 = client1.transport.update_deployment._session + session2 = client2.transport.update_deployment._session + assert session1 != session2 + session1 = client1.transport.delete_deployment._session + session2 = client2.transport.delete_deployment._session + assert session1 != session2 + session1 = client1.transport.create_attribute._session + session2 = client2.transport.create_attribute._session + assert session1 != session2 + session1 = client1.transport.get_attribute._session + session2 = client2.transport.get_attribute._session + assert session1 != session2 + session1 = client1.transport.update_attribute._session + session2 = client2.transport.update_attribute._session + assert session1 != session2 + session1 = client1.transport.delete_attribute._session + session2 = 
client2.transport.delete_attribute._session + assert session1 != session2 + session1 = client1.transport.list_attributes._session + session2 = client2.transport.list_attributes._session + assert session1 != session2 + session1 = client1.transport.search_resources._session + session2 = client2.transport.search_resources._session + assert session1 != session2 + session1 = client1.transport.create_external_api._session + session2 = client2.transport.create_external_api._session + assert session1 != session2 + session1 = client1.transport.get_external_api._session + session2 = client2.transport.get_external_api._session + assert session1 != session2 + session1 = client1.transport.update_external_api._session + session2 = client2.transport.update_external_api._session + assert session1 != session2 + session1 = client1.transport.delete_external_api._session + session2 = client2.transport.delete_external_api._session + assert session1 != session2 + session1 = client1.transport.list_external_apis._session + session2 = client2.transport.list_external_apis._session + assert session1 != session2 -def test_delete_operation(transport: str = "grpc"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_api_path(): + project = "squid" + location = "clam" + api = "whelk" + expected = "projects/{project}/locations/{location}/apis/{api}".format( + project=project, + location=location, + api=api, ) + actual = ApiHubClient.api_path(project, location, api) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_api_path(): + expected = { + "project": "octopus", + "location": "oyster", + "api": "nudibranch", + } + path = ApiHubClient.api_path(**expected) - # Establish that the response is the type that we expect. - assert response is None + # Check that the path construction is reversible. + actual = ApiHubClient.parse_api_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_api_operation_path(): + project = "cuttlefish" + location = "mussel" + api = "winkle" + version = "nautilus" + operation = "scallop" + expected = "projects/{project}/locations/{location}/apis/{api}/versions/{version}/operations/{operation}".format( + project=project, + location=location, + api=api, + version=version, + operation=operation, ) + actual = ApiHubClient.api_operation_path(project, location, api, version, operation) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_api_operation_path(): + expected = { + "project": "abalone", + "location": "squid", + "api": "clam", + "version": "whelk", + "operation": "octopus", + } + path = ApiHubClient.api_operation_path(**expected) - # Establish that the response is the type that we expect. - assert response is None + # Check that the path construction is reversible. + actual = ApiHubClient.parse_api_operation_path(path) + assert expected == actual -def test_delete_operation_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_attribute_path(): + project = "oyster" + location = "nudibranch" + attribute = "cuttlefish" + expected = "projects/{project}/locations/{location}/attributes/{attribute}".format( + project=project, + location=location, + attribute=attribute, ) + actual = ApiHubClient.attribute_path(project, location, attribute) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_attribute_path(): + expected = { + "project": "mussel", + "location": "winkle", + "attribute": "nautilus", + } + path = ApiHubClient.attribute_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. 
+ actual = ApiHubClient.parse_attribute_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_definition_path(): + project = "scallop" + location = "abalone" + api = "squid" + version = "clam" + definition = "whelk" + expected = "projects/{project}/locations/{location}/apis/{api}/versions/{version}/definitions/{definition}".format( + project=project, + location=location, + api=api, + version=version, + definition=definition, ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + actual = ApiHubClient.definition_path(project, location, api, version, definition) + assert expected == actual -def test_delete_operation_from_dict(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = None +def test_parse_definition_path(): + expected = { + "project": "octopus", + "location": "oyster", + "api": "nudibranch", + "version": "cuttlefish", + "definition": "mussel", + } + path = ApiHubClient.definition_path(**expected) - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() + # Check that the path construction is reversible. + actual = ApiHubClient.parse_definition_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation( - request={ - "name": "locations", - } +def test_deployment_path(): + project = "winkle" + location = "nautilus" + deployment = "scallop" + expected = ( + "projects/{project}/locations/{location}/deployments/{deployment}".format( + project=project, + location=location, + deployment=deployment, ) - call.assert_called() - - -def test_cancel_operation(transport: str = "grpc"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) + actual = ApiHubClient.deployment_path(project, location, deployment) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_deployment_path(): + expected = { + "project": "abalone", + "location": "squid", + "deployment": "clam", + } + path = ApiHubClient.deployment_path(**expected) - # Establish that the response is the type that we expect. - assert response is None + # Check that the path construction is reversible. + actual = ApiHubClient.parse_deployment_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_external_api_path(): + project = "whelk" + location = "octopus" + external_api = "oyster" + expected = ( + "projects/{project}/locations/{location}/externalApis/{external_api}".format( + project=project, + location=location, + external_api=external_api, + ) ) + actual = ApiHubClient.external_api_path(project, location, external_api) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_external_api_path(): + expected = { + "project": "nudibranch", + "location": "cuttlefish", + "external_api": "mussel", + } + path = ApiHubClient.external_api_path(**expected) - # Establish that the response is the type that we expect. - assert response is None + # Check that the path construction is reversible. + actual = ApiHubClient.parse_external_api_path(path) + assert expected == actual -def test_cancel_operation_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_spec_path(): + project = "winkle" + location = "nautilus" + api = "scallop" + version = "abalone" + spec = "squid" + expected = "projects/{project}/locations/{location}/apis/{api}/versions/{version}/specs/{spec}".format( + project=project, + location=location, + api=api, + version=version, + spec=spec, ) + actual = ApiHubClient.spec_path(project, location, api, version, spec) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_spec_path(): + expected = { + "project": "clam", + "location": "whelk", + "api": "octopus", + "version": "oyster", + "spec": "nudibranch", + } + path = ApiHubClient.spec_path(**expected) - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = ApiHubClient.parse_spec_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_version_path(): + project = "cuttlefish" + location = "mussel" + api = "winkle" + version = "nautilus" + expected = ( + "projects/{project}/locations/{location}/apis/{api}/versions/{version}".format( + project=project, + location=location, + api=api, + version=version, + ) ) + actual = ApiHubClient.version_path(project, location, api, version) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_version_path(): + expected = { + "project": "scallop", + "location": "abalone", + "api": "squid", + "version": "clam", + } + path = ApiHubClient.version_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. 
+ actual = ApiHubClient.parse_version_path(path) + assert expected == actual -def test_cancel_operation_from_dict(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None + actual = ApiHubClient.common_billing_account_path(billing_account) + assert expected == actual - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = ApiHubClient.common_billing_account_path(**expected) -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() + # Check that the path construction is reversible. 
+ actual = ApiHubClient.parse_common_billing_account_path(path) + assert expected == actual -def test_get_operation(transport: str = "grpc"): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, ) + actual = ApiHubClient.common_folder_path(folder) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = ApiHubClient.common_folder_path(**expected) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) + # Check that the path construction is reversible. 
+ actual = ApiHubClient.parse_common_folder_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, ) + actual = ApiHubClient.common_organization_path(organization) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = ApiHubClient.common_organization_path(**expected) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) + # Check that the path construction is reversible. 
+ actual = ApiHubClient.parse_common_organization_path(path) + assert expected == actual -def test_get_operation_field_headers(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, ) + actual = ApiHubClient.common_project_path(project) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = ApiHubClient.common_project_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = ApiHubClient.parse_common_project_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, ) + actual = ApiHubClient.common_location_path(project, location) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = operations_pb2.GetOperationRequest() - request.name = "locations" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = ApiHubClient.common_location_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = ApiHubClient.parse_common_location_path(path) + assert expected == actual -def test_get_operation_from_dict(): - client = ApiHubClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation() +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() - response = client.get_operation( - request={ - "name": "locations", - } + with mock.patch.object( + transports.ApiHubTransport, "_prep_wrapped_messages" + ) as prep: + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) - call.assert_called() - + prep.assert_called_once_with(client_info) -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } + with mock.patch.object( + transports.ApiHubTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ApiHubClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) - call.assert_called() + prep.assert_called_once_with(client_info) -def test_list_operations(transport: str = "grpc"): +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - - -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) -def test_list_operations_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.get_location(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) -def test_list_operations_from_dict(): +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) -def test_list_locations(transport: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, locations_pb2.ListLocationsResponse) -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc_asyncio"): - client = ApiHubAsyncClient( +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) -def test_list_locations_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = locations_pb2.ListLocationsRequest() - request.name = "locations" + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.cancel_operation(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Establish that the response is the type that we expect. + assert response is None -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = ApiHubAsyncClient( +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) -def test_list_locations_from_dict(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() + response = client.delete_operation(request) + # Establish that the response is the type that we expect. + assert response is None -def test_get_location(transport: str = "grpc"): + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) -@pytest.mark.asyncio -async def test_get_location_async(transport: str = "grpc_asyncio"): - client = ApiHubAsyncClient( +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -def test_get_location_field_headers(): - client = ApiHubClient(credentials=ga_credentials.AnonymousCredentials()) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = locations_pb2.Location() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations/abc", - ) in kw["metadata"] + response = client.get_operation(request) + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_location_field_headers_async(): - client = ApiHubAsyncClient(credentials=ga_credentials.AnonymousCredentials()) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = ApiHubClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations/abc", - ) in kw["metadata"] + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) -def test_get_location_from_dict(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): client = ApiHubClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = locations_pb2.Location() + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() - response = client.get_location( - request={ - "name": "locations/abc", - } - ) - call.assert_called() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -@pytest.mark.asyncio -async def test_get_location_from_dict_async(): - client = ApiHubAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location( - request={ - "name": "locations", - } - ) - call.assert_called() + response = client.list_operations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) def test_transport_close(): transports = { "rest": "_session", - "grpc": "_grpc_channel", } for transport, close_name in transports.items(): @@ -31071,7 +14654,6 @@ def test_transport_close(): def test_client_ctx(): transports = [ "rest", - "grpc", ] for transport in transports: client = ApiHubClient( @@ -31088,8 +14670,7 @@ def test_client_ctx(): @pytest.mark.parametrize( "client_class,transport_class", [ - (ApiHubClient, transports.ApiHubGrpcTransport), - (ApiHubAsyncClient, transports.ApiHubGrpcAsyncIOTransport), + (ApiHubClient, transports.ApiHubRestTransport), ], ) def test_api_key_credentials(client_class, transport_class): diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_dependencies.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_dependencies.py index bf0a3da3c9e3..525149783ded 100644 --- a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_dependencies.py +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_dependencies.py @@ -48,7 +48,6 @@ from requests.sessions import Session from google.cloud.apihub_v1.services.api_hub_dependencies import ( - ApiHubDependenciesAsyncClient, ApiHubDependenciesClient, pagers, transports, @@ -222,11 +221,6 @@ def test__get_client_cert_source(): "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ApiHubDependenciesClient), ) -@mock.patch.object( - ApiHubDependenciesAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ApiHubDependenciesAsyncClient), -) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() @@ -315,7 +309,6 @@ def test__get_universe_domain(): @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (ApiHubDependenciesClient, transports.ApiHubDependenciesGrpcTransport, "grpc"), (ApiHubDependenciesClient, transports.ApiHubDependenciesRestTransport, 
"rest"), ], ) @@ -395,8 +388,6 @@ def test__validate_universe_domain(client_class, transport_class, transport_name @pytest.mark.parametrize( "client_class,transport_name", [ - (ApiHubDependenciesClient, "grpc"), - (ApiHubDependenciesAsyncClient, "grpc_asyncio"), (ApiHubDependenciesClient, "rest"), ], ) @@ -423,8 +414,6 @@ def test_api_hub_dependencies_client_from_service_account_info( @pytest.mark.parametrize( "transport_class,transport_name", [ - (transports.ApiHubDependenciesGrpcTransport, "grpc"), - (transports.ApiHubDependenciesGrpcAsyncIOTransport, "grpc_asyncio"), (transports.ApiHubDependenciesRestTransport, "rest"), ], ) @@ -449,8 +438,6 @@ def test_api_hub_dependencies_client_service_account_always_use_jwt( @pytest.mark.parametrize( "client_class,transport_name", [ - (ApiHubDependenciesClient, "grpc"), - (ApiHubDependenciesAsyncClient, "grpc_asyncio"), (ApiHubDependenciesClient, "rest"), ], ) @@ -484,24 +471,17 @@ def test_api_hub_dependencies_client_from_service_account_file( def test_api_hub_dependencies_client_get_transport_class(): transport = ApiHubDependenciesClient.get_transport_class() available_transports = [ - transports.ApiHubDependenciesGrpcTransport, transports.ApiHubDependenciesRestTransport, ] assert transport in available_transports - transport = ApiHubDependenciesClient.get_transport_class("grpc") - assert transport == transports.ApiHubDependenciesGrpcTransport + transport = ApiHubDependenciesClient.get_transport_class("rest") + assert transport == transports.ApiHubDependenciesRestTransport @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (ApiHubDependenciesClient, transports.ApiHubDependenciesGrpcTransport, "grpc"), - ( - ApiHubDependenciesAsyncClient, - transports.ApiHubDependenciesGrpcAsyncIOTransport, - "grpc_asyncio", - ), (ApiHubDependenciesClient, transports.ApiHubDependenciesRestTransport, "rest"), ], ) @@ -510,11 +490,6 @@ def test_api_hub_dependencies_client_get_transport_class(): 
"_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ApiHubDependenciesClient), ) -@mock.patch.object( - ApiHubDependenciesAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ApiHubDependenciesAsyncClient), -) def test_api_hub_dependencies_client_client_options( client_class, transport_class, transport_name ): @@ -648,30 +623,6 @@ def test_api_hub_dependencies_client_client_options( @pytest.mark.parametrize( "client_class,transport_class,transport_name,use_client_cert_env", [ - ( - ApiHubDependenciesClient, - transports.ApiHubDependenciesGrpcTransport, - "grpc", - "true", - ), - ( - ApiHubDependenciesAsyncClient, - transports.ApiHubDependenciesGrpcAsyncIOTransport, - "grpc_asyncio", - "true", - ), - ( - ApiHubDependenciesClient, - transports.ApiHubDependenciesGrpcTransport, - "grpc", - "false", - ), - ( - ApiHubDependenciesAsyncClient, - transports.ApiHubDependenciesGrpcAsyncIOTransport, - "grpc_asyncio", - "false", - ), ( ApiHubDependenciesClient, transports.ApiHubDependenciesRestTransport, @@ -691,11 +642,6 @@ def test_api_hub_dependencies_client_client_options( "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ApiHubDependenciesClient), ) -@mock.patch.object( - ApiHubDependenciesAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ApiHubDependenciesAsyncClient), -) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_api_hub_dependencies_client_mtls_env_auto( client_class, transport_class, transport_name, use_client_cert_env @@ -799,19 +745,12 @@ def test_api_hub_dependencies_client_mtls_env_auto( ) -@pytest.mark.parametrize( - "client_class", [ApiHubDependenciesClient, ApiHubDependenciesAsyncClient] -) +@pytest.mark.parametrize("client_class", [ApiHubDependenciesClient]) @mock.patch.object( ApiHubDependenciesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ApiHubDependenciesClient), ) -@mock.patch.object( - ApiHubDependenciesAsyncClient, - 
"DEFAULT_ENDPOINT", - modify_default_endpoint(ApiHubDependenciesAsyncClient), -) def test_api_hub_dependencies_client_get_mtls_endpoint_and_cert_source(client_class): mock_client_cert_source = mock.Mock() @@ -903,19 +842,12 @@ def test_api_hub_dependencies_client_get_mtls_endpoint_and_cert_source(client_cl ) -@pytest.mark.parametrize( - "client_class", [ApiHubDependenciesClient, ApiHubDependenciesAsyncClient] -) +@pytest.mark.parametrize("client_class", [ApiHubDependenciesClient]) @mock.patch.object( ApiHubDependenciesClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ApiHubDependenciesClient), ) -@mock.patch.object( - ApiHubDependenciesAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ApiHubDependenciesAsyncClient), -) def test_api_hub_dependencies_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback api_override = "foo.com" @@ -992,12 +924,6 @@ def test_api_hub_dependencies_client_client_api_endpoint(client_class): @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (ApiHubDependenciesClient, transports.ApiHubDependenciesGrpcTransport, "grpc"), - ( - ApiHubDependenciesAsyncClient, - transports.ApiHubDependenciesGrpcAsyncIOTransport, - "grpc_asyncio", - ), (ApiHubDependenciesClient, transports.ApiHubDependenciesRestTransport, "rest"), ], ) @@ -1029,18 +955,6 @@ def test_api_hub_dependencies_client_client_options_scopes( @pytest.mark.parametrize( "client_class,transport_class,transport_name,grpc_helpers", [ - ( - ApiHubDependenciesClient, - transports.ApiHubDependenciesGrpcTransport, - "grpc", - grpc_helpers, - ), - ( - ApiHubDependenciesAsyncClient, - transports.ApiHubDependenciesGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), ( ApiHubDependenciesClient, transports.ApiHubDependenciesRestTransport, @@ -1073,96 +987,6 @@ def test_api_hub_dependencies_client_client_options_credentials_file( ) -def 
test_api_hub_dependencies_client_client_options_from_dict(): - with mock.patch( - "google.cloud.apihub_v1.services.api_hub_dependencies.transports.ApiHubDependenciesGrpcTransport.__init__" - ) as grpc_transport: - grpc_transport.return_value = None - client = ApiHubDependenciesClient( - client_options={"api_endpoint": "squid.clam.whelk"} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name,grpc_helpers", - [ - ( - ApiHubDependenciesClient, - transports.ApiHubDependenciesGrpcTransport, - "grpc", - grpc_helpers, - ), - ( - ApiHubDependenciesAsyncClient, - transports.ApiHubDependenciesGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), - ], -) -def test_api_hub_dependencies_client_create_channel_credentials_file( - client_class, transport_class, transport_name, grpc_helpers -): - # Check the case credentials file is provided. - options = client_options.ClientOptions(credentials_file="credentials.json") - - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "apihub.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=None, - default_host="apihub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - @pytest.mark.parametrize( "request_type", [ @@ -1170,34 +994,119 @@ def test_api_hub_dependencies_client_create_channel_credentials_file( dict, ], ) -def test_create_dependency(request_type, transport: str = "grpc"): +def test_create_dependency_rest(request_type): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["dependency"] = { + "name": "name_value", + "consumer": { + "operation_resource_name": "operation_resource_name_value", + "external_api_resource_name": "external_api_resource_name_value", + "display_name": "display_name_value", + }, + "supplier": {}, + "state": 1, + "description": "description_value", + "discovery_mode": 1, + "error_detail": {"error": 1, "error_time": {"seconds": 751, "nanos": 543}}, + "create_time": {}, + "update_time": {}, + "attributes": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dependency), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Dependency( + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.CreateDependencyRequest.meta.fields["dependency"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["dependency"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["dependency"][field])): + del request_init["dependency"][field][i][subfield] + else: + del 
request_init["dependency"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Dependency( name="name_value", state=common_fields.Dependency.State.PROPOSED, description="description_value", discovery_mode=common_fields.Dependency.DiscoveryMode.MANUAL, ) - response = client.create_dependency(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.CreateDependencyRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Dependency.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_dependency(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.Dependency) @@ -1207,66 +1116,13 @@ def test_create_dependency(request_type, transport: str = "grpc"): assert response.discovery_mode == common_fields.Dependency.DiscoveryMode.MANUAL -def test_create_dependency_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_dependency), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_dependency() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateDependencyRequest() - - -def test_create_dependency_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.CreateDependencyRequest( - parent="parent_value", - dependency_id="dependency_id_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dependency), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.create_dependency(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateDependencyRequest( - parent="parent_value", - dependency_id="dependency_id_value", - ) - - -def test_create_dependency_use_cached_wrapped_rpc(): +def test_create_dependency_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1284,6 +1140,7 @@ def test_create_dependency_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.create_dependency ] = mock_rpc + request = {} client.create_dependency(request) @@ -1297,287 +1154,235 @@ def test_create_dependency_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_dependency_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", +def test_create_dependency_rest_required_fields( + request_type=apihub_service.CreateDependencyRequest, +): + transport_class = transports.ApiHubDependenciesRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_dependency), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Dependency( - name="name_value", - state=common_fields.Dependency.State.PROPOSED, - description="description_value", - discovery_mode=common_fields.Dependency.DiscoveryMode.MANUAL, - ) - ) - response = await client.create_dependency() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.CreateDependencyRequest() + # verify fields with default values are dropped + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_dependency._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) -@pytest.mark.asyncio -async def test_create_dependency_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.create_dependency - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.create_dependency - ] = mock_rpc - - request = {} - await client.create_dependency(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.create_dependency(request) + jsonified_request["parent"] = "parent_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_dependency._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("dependency_id",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -@pytest.mark.asyncio -async def test_create_dependency_async( - transport: str = "grpc_asyncio", request_type=apihub_service.CreateDependencyRequest -): - client = ApiHubDependenciesAsyncClient( + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Dependency() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dependency), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Dependency( - name="name_value", - state=common_fields.Dependency.State.PROPOSED, - description="description_value", - discovery_mode=common_fields.Dependency.DiscoveryMode.MANUAL, - ) - ) - response = await client.create_dependency(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.CreateDependencyRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Dependency.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Dependency) - assert response.name == "name_value" - assert response.state == common_fields.Dependency.State.PROPOSED - assert response.description == "description_value" - assert response.discovery_mode == common_fields.Dependency.DiscoveryMode.MANUAL + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_dependency(request) -@pytest.mark.asyncio -async def test_create_dependency_async_from_dict(): - await test_create_dependency_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_create_dependency_field_headers(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_create_dependency_rest_unset_required_fields(): + transport = transports.ApiHubDependenciesRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.CreateDependencyRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dependency), "__call__" - ) as call: - call.return_value = common_fields.Dependency() - client.create_dependency(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + unset_fields = transport.create_dependency._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("dependencyId",)) + & set( + ( + "parent", + "dependency", + ) + ) + ) -@pytest.mark.asyncio -async def test_create_dependency_field_headers_async(): - client = ApiHubDependenciesAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_dependency_rest_interceptors(null_interceptor): + transport = transports.ApiHubDependenciesRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ApiHubDependenciesRestInterceptor(), ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.CreateDependencyRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. + client = ApiHubDependenciesClient(transport=transport) with mock.patch.object( - type(client.transport.create_dependency), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Dependency() + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubDependenciesRestInterceptor, "post_create_dependency" + ) as post, mock.patch.object( + transports.ApiHubDependenciesRestInterceptor, "pre_create_dependency" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.CreateDependencyRequest.pb( + apihub_service.CreateDependencyRequest() ) - await client.create_dependency(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Dependency.to_json( + common_fields.Dependency() + ) -def test_create_dependency_flattened(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.CreateDependencyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Dependency() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dependency), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Dependency() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.create_dependency( - parent="parent_value", - dependency=common_fields.Dependency(name="name_value"), - dependency_id="dependency_id_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].dependency - mock_val = common_fields.Dependency(name="name_value") - assert arg == mock_val - arg = args[0].dependency_id - mock_val = "dependency_id_value" - assert arg == mock_val - - -def test_create_dependency_flattened_error(): + pre.assert_called_once() + post.assert_called_once() + + +def test_create_dependency_rest_bad_request( + transport: str = "rest", request_type=apihub_service.CreateDependencyRequest +): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_dependency( - apihub_service.CreateDependencyRequest(), - parent="parent_value", - dependency=common_fields.Dependency(name="name_value"), - dependency_id="dependency_id_value", - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_dependency(request) -@pytest.mark.asyncio -async def test_create_dependency_flattened_async(): - client = ApiHubDependenciesAsyncClient( +def test_create_dependency_rest_flattened(): + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_dependency), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Dependency() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Dependency() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Dependency() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_dependency( + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", dependency=common_fields.Dependency(name="name_value"), dependency_id="dependency_id_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Dependency.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_dependency(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].dependency - mock_val = common_fields.Dependency(name="name_value") - assert arg == mock_val - arg = args[0].dependency_id - mock_val = "dependency_id_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_create_dependency_flattened_error_async(): - client = ApiHubDependenciesAsyncClient( + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/dependencies" + % client.transport._host, + args[1], + ) + + +def test_create_dependency_rest_flattened_error(transport: str = "rest"): + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_dependency( + client.create_dependency( apihub_service.CreateDependencyRequest(), parent="parent_value", dependency=common_fields.Dependency(name="name_value"), @@ -1585,6 +1390,12 @@ async def test_create_dependency_flattened_error_async(): ) +def test_create_dependency_rest_error(): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -1592,34 +1403,38 @@ async def test_create_dependency_flattened_error_async(): dict, ], ) -def test_get_dependency(request_type, transport: str = "grpc"): +def test_get_dependency_rest(request_type): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/dependencies/sample3"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_dependency), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Dependency( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Dependency( name="name_value", state=common_fields.Dependency.State.PROPOSED, description="description_value", discovery_mode=common_fields.Dependency.DiscoveryMode.MANUAL, ) - response = client.get_dependency(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.GetDependencyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Dependency.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_dependency(request) + + # Establish that the response is the type that we expect. 
assert isinstance(response, common_fields.Dependency) assert response.name == "name_value" assert response.state == common_fields.Dependency.State.PROPOSED @@ -1627,60 +1442,13 @@ def test_get_dependency(request_type, transport: str = "grpc"): assert response.discovery_mode == common_fields.Dependency.DiscoveryMode.MANUAL -def test_get_dependency_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_dependency), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_dependency() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetDependencyRequest() - - -def test_get_dependency_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.GetDependencyRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_dependency), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.get_dependency(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetDependencyRequest( - name="name_value", - ) - - -def test_get_dependency_use_cached_wrapped_rpc(): +def test_get_dependency_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1696,6 +1464,7 @@ def test_get_dependency_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.get_dependency] = mock_rpc + request = {} client.get_dependency(request) @@ -1709,207 +1478,218 @@ def test_get_dependency_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_dependency_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_dependency), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Dependency( - name="name_value", - state=common_fields.Dependency.State.PROPOSED, - description="description_value", - discovery_mode=common_fields.Dependency.DiscoveryMode.MANUAL, - ) - ) - response = await client.get_dependency() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.GetDependencyRequest() - - -@pytest.mark.asyncio -async def test_get_dependency_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_get_dependency_rest_required_fields( + request_type=apihub_service.GetDependencyRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ApiHubDependenciesRestTransport - # Ensure method has been cached - assert ( - client._client._transport.get_dependency - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_dependency - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.get_dependency(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_dependency._get_unset_required_fields(jsonified_request) 
+ jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.get_dependency(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_dependency._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_get_dependency_async( - transport: str = "grpc_asyncio", request_type=apihub_service.GetDependencyRequest -): - client = ApiHubDependenciesAsyncClient( + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Dependency() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_dependency), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Dependency( - name="name_value", - state=common_fields.Dependency.State.PROPOSED, - description="description_value", - discovery_mode=common_fields.Dependency.DiscoveryMode.MANUAL, - ) - ) - response = await client.get_dependency(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.GetDependencyRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Dependency.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Dependency) - assert response.name == "name_value" - assert response.state == common_fields.Dependency.State.PROPOSED - assert response.description == "description_value" - assert response.discovery_mode == common_fields.Dependency.DiscoveryMode.MANUAL + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_dependency(request) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -@pytest.mark.asyncio -async def test_get_dependency_async_from_dict(): - await test_get_dependency_async(request_type=dict) +def test_get_dependency_rest_unset_required_fields(): + transport = transports.ApiHubDependenciesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) -def test_get_dependency_field_headers(): - client = ApiHubDependenciesClient( + unset_fields = transport.get_dependency._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_dependency_rest_interceptors(null_interceptor): + transport = transports.ApiHubDependenciesRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ApiHubDependenciesRestInterceptor(), ) + client = ApiHubDependenciesClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubDependenciesRestInterceptor, "post_get_dependency" + ) as post, mock.patch.object( + transports.ApiHubDependenciesRestInterceptor, "pre_get_dependency" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.GetDependencyRequest.pb( + apihub_service.GetDependencyRequest() + ) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.GetDependencyRequest() - - request.name = "name_value" + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Dependency.to_json( + common_fields.Dependency() + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_dependency), "__call__") as call: - call.return_value = common_fields.Dependency() - client.get_dependency(request) + request = apihub_service.GetDependencyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Dependency() - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + client.get_dependency( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + pre.assert_called_once() + post.assert_called_once() -@pytest.mark.asyncio -async def test_get_dependency_field_headers_async(): - client = ApiHubDependenciesAsyncClient( +def test_get_dependency_rest_bad_request( + transport: str = "rest", request_type=apihub_service.GetDependencyRequest +): + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = apihub_service.GetDependencyRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_dependency), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Dependency() - ) - await client.get_dependency(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/dependencies/sample3"} + request = request_type(**request_init) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_dependency(request) -def test_get_dependency_flattened(): +def test_get_dependency_rest_flattened(): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_dependency), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Dependency() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_dependency( + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Dependency() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dependencies/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Dependency.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_dependency(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/dependencies/*}" + % client.transport._host, + args[1], + ) -def test_get_dependency_flattened_error(): +def test_get_dependency_rest_flattened_error(transport: str = "rest"): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1921,49 +1701,11 @@ def test_get_dependency_flattened_error(): ) -@pytest.mark.asyncio -async def test_get_dependency_flattened_async(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_dependency), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Dependency() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Dependency() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_dependency( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_get_dependency_flattened_error_async(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_dependency_rest_error(): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_dependency( - apihub_service.GetDependencyRequest(), - name="name_value", - ) - @pytest.mark.parametrize( "request_type", @@ -1972,34 +1714,123 @@ async def test_get_dependency_flattened_error_async(): dict, ], ) -def test_update_dependency(request_type, transport: str = "grpc"): +def test_update_dependency_rest(request_type): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = { + "dependency": { + "name": "projects/sample1/locations/sample2/dependencies/sample3" + } + } + request_init["dependency"] = { + "name": "projects/sample1/locations/sample2/dependencies/sample3", + "consumer": { + "operation_resource_name": "operation_resource_name_value", + "external_api_resource_name": "external_api_resource_name_value", + "display_name": "display_name_value", + }, + "supplier": {}, + "state": 1, + "description": "description_value", + "discovery_mode": 1, + "error_detail": {"error": 1, "error_time": {"seconds": 751, "nanos": 543}}, + "create_time": {}, + "update_time": {}, + "attributes": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_dependency), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Dependency( + # Determine if the message type is proto-plus or protobuf + test_field = apihub_service.UpdateDependencyRequest.meta.fields["dependency"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["dependency"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["dependency"][field])): + del request_init["dependency"][field][i][subfield] + else: + del 
request_init["dependency"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Dependency( name="name_value", state=common_fields.Dependency.State.PROPOSED, description="description_value", discovery_mode=common_fields.Dependency.DiscoveryMode.MANUAL, ) - response = client.update_dependency(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.UpdateDependencyRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Dependency.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_dependency(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.Dependency) @@ -2009,60 +1840,13 @@ def test_update_dependency(request_type, transport: str = "grpc"): assert response.discovery_mode == common_fields.Dependency.DiscoveryMode.MANUAL -def test_update_dependency_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_dependency), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_dependency() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateDependencyRequest() - - -def test_update_dependency_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.UpdateDependencyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_dependency), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.update_dependency(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateDependencyRequest() - - -def test_update_dependency_use_cached_wrapped_rpc(): +def test_update_dependency_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -2080,6 +1864,7 @@ def test_update_dependency_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.update_dependency ] = mock_rpc + request = {} client.update_dependency(request) @@ -2093,284 +1878,249 @@ def test_update_dependency_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_dependency_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_dependency), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Dependency( - name="name_value", - state=common_fields.Dependency.State.PROPOSED, - description="description_value", - discovery_mode=common_fields.Dependency.DiscoveryMode.MANUAL, - ) - ) - response = await client.update_dependency() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.UpdateDependencyRequest() - - -@pytest.mark.asyncio -async def test_update_dependency_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_update_dependency_rest_required_fields( + request_type=apihub_service.UpdateDependencyRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.update_dependency - in client._client._transport._wrapped_methods - ) + transport_class = transports.ApiHubDependenciesRestTransport - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.update_dependency - ] = mock_rpc + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - request = {} - await client.update_dependency(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_dependency._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - await client.update_dependency(request) + # verify required fields with default values are now present - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_dependency._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone -@pytest.mark.asyncio -async def test_update_dependency_async( - transport: str = "grpc_asyncio", request_type=apihub_service.UpdateDependencyRequest -): - client = ApiHubDependenciesAsyncClient( + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.Dependency() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_dependency), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Dependency( - name="name_value", - state=common_fields.Dependency.State.PROPOSED, - description="description_value", - discovery_mode=common_fields.Dependency.DiscoveryMode.MANUAL, - ) - ) - response = await client.update_dependency(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.UpdateDependencyRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.Dependency.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Dependency) - assert response.name == "name_value" - assert response.state == common_fields.Dependency.State.PROPOSED - assert response.description == "description_value" - assert response.discovery_mode == common_fields.Dependency.DiscoveryMode.MANUAL + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_dependency(request) -@pytest.mark.asyncio -async def test_update_dependency_async_from_dict(): - await test_update_dependency_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_update_dependency_field_headers(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.UpdateDependencyRequest() - - request.dependency.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_dependency), "__call__" - ) as call: - call.return_value = common_fields.Dependency() - client.update_dependency(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_update_dependency_rest_unset_required_fields(): + transport = transports.ApiHubDependenciesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "dependency.name=name_value", - ) in kw["metadata"] + unset_fields = transport.update_dependency._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "dependency", + "updateMask", + ) + ) + ) -@pytest.mark.asyncio -async def test_update_dependency_field_headers_async(): - client = ApiHubDependenciesAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_dependency_rest_interceptors(null_interceptor): + transport = transports.ApiHubDependenciesRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ApiHubDependenciesRestInterceptor(), ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.UpdateDependencyRequest() - - request.dependency.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. + client = ApiHubDependenciesClient(transport=transport) with mock.patch.object( - type(client.transport.update_dependency), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Dependency() + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubDependenciesRestInterceptor, "post_update_dependency" + ) as post, mock.patch.object( + transports.ApiHubDependenciesRestInterceptor, "pre_update_dependency" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.UpdateDependencyRequest.pb( + apihub_service.UpdateDependencyRequest() ) - await client.update_dependency(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "dependency.name=name_value", - ) in kw["metadata"] + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.Dependency.to_json( + common_fields.Dependency() + ) -def test_update_dependency_flattened(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.UpdateDependencyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.Dependency() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_dependency), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Dependency() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.update_dependency( - dependency=common_fields.Dependency(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].dependency - mock_val = common_fields.Dependency(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_update_dependency_flattened_error(): +def test_update_dependency_rest_bad_request( + transport: str = "rest", request_type=apihub_service.UpdateDependencyRequest +): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_dependency( - apihub_service.UpdateDependencyRequest(), - dependency=common_fields.Dependency(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) + # send a request that will satisfy transcoding + request_init = { + "dependency": { + "name": "projects/sample1/locations/sample2/dependencies/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_dependency(request) -@pytest.mark.asyncio -async def test_update_dependency_flattened_async(): - client = ApiHubDependenciesAsyncClient( +def test_update_dependency_rest_flattened(): + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_dependency), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.Dependency() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.Dependency() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.Dependency() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_dependency( + # get arguments that satisfy an http rule for this method + sample_request = { + "dependency": { + "name": "projects/sample1/locations/sample2/dependencies/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( dependency=common_fields.Dependency(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.Dependency.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_dependency(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].dependency - mock_val = common_fields.Dependency(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_update_dependency_flattened_error_async(): - client = ApiHubDependenciesAsyncClient( + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{dependency.name=projects/*/locations/*/dependencies/*}" + % client.transport._host, + args[1], + ) + + +def test_update_dependency_rest_flattened_error(transport: str = "rest"): + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_dependency( + client.update_dependency( apihub_service.UpdateDependencyRequest(), dependency=common_fields.Dependency(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) +def test_update_dependency_rest_error(): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -2378,92 +2128,41 @@ async def test_update_dependency_flattened_error_async(): dict, ], ) -def test_delete_dependency(request_type, transport: str = "grpc"): +def test_delete_dependency_rest(request_type): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/dependencies/sample3"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dependency), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_dependency(request) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.DeleteDependencyRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_dependency(request) # Establish that the response is the type that we expect. assert response is None -def test_delete_dependency_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dependency), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.delete_dependency() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteDependencyRequest() - - -def test_delete_dependency_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = apihub_service.DeleteDependencyRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dependency), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.delete_dependency(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteDependencyRequest( - name="name_value", - ) - - -def test_delete_dependency_use_cached_wrapped_rpc(): +def test_delete_dependency_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -2481,6 +2180,7 @@ def test_delete_dependency_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.delete_dependency ] = mock_rpc + request = {} client.delete_dependency(request) @@ -2494,252 +2194,222 @@ def test_delete_dependency_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_dependency_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dependency), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_dependency() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.DeleteDependencyRequest() - - -@pytest.mark.asyncio -async def test_delete_dependency_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_delete_dependency_rest_required_fields( + request_type=apihub_service.DeleteDependencyRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ApiHubDependenciesRestTransport - # Ensure method has been cached - assert ( - client._client._transport.delete_dependency - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.delete_dependency - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.delete_dependency(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_dependency._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.delete_dependency(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_dependency._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_delete_dependency_async( - transport: str = "grpc_asyncio", request_type=apihub_service.DeleteDependencyRequest -): - client = ApiHubDependenciesAsyncClient( + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dependency), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_dependency(request) + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.DeleteDependencyRequest() - assert args[0] == request + response_value = Response() + response_value.status_code = 200 + json_return_value = "" - # Establish that the response is the type that we expect. - assert response is None + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_dependency(request) -@pytest.mark.asyncio -async def test_delete_dependency_async_from_dict(): - await test_delete_dependency_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_delete_dependency_field_headers(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_delete_dependency_rest_unset_required_fields(): + transport = transports.ApiHubDependenciesRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.DeleteDependencyRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dependency), "__call__" - ) as call: - call.return_value = None - client.delete_dependency(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.delete_dependency._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.asyncio -async def test_delete_dependency_field_headers_async(): - client = ApiHubDependenciesAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_dependency_rest_interceptors(null_interceptor): + transport = transports.ApiHubDependenciesRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ApiHubDependenciesRestInterceptor(), ) + client = ApiHubDependenciesClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubDependenciesRestInterceptor, "pre_delete_dependency" + ) as pre: + pre.assert_not_called() + pb_message = apihub_service.DeleteDependencyRequest.pb( + apihub_service.DeleteDependencyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.DeleteDependencyRequest() - - request.name = "name_value" + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_dependency), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_dependency(request) + request = apihub_service.DeleteDependencyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + client.delete_dependency( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + pre.assert_called_once() -def test_delete_dependency_flattened(): +def test_delete_dependency_rest_bad_request( + transport: str = "rest", request_type=apihub_service.DeleteDependencyRequest +): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dependency), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_dependency( - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/dependencies/sample3"} + request = request_type(**request_init) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_dependency(request) -def test_delete_dependency_flattened_error(): +def test_delete_dependency_rest_flattened(): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_dependency( - apihub_service.DeleteDependencyRequest(), - name="name_value", - ) - + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None -@pytest.mark.asyncio -async def test_delete_dependency_flattened_async(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dependencies/sample3" + } - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dependency), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.delete_dependency( + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_dependency(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/dependencies/*}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_delete_dependency_flattened_error_async(): - client = ApiHubDependenciesAsyncClient( +def test_delete_dependency_rest_flattened_error(transport: str = "rest"): + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.delete_dependency( + client.delete_dependency( apihub_service.DeleteDependencyRequest(), name="name_value", ) +def test_delete_dependency_rest_error(): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -2747,99 +2417,46 @@ async def test_delete_dependency_flattened_error_async(): dict, ], ) -def test_list_dependencies(request_type, transport: str = "grpc"): +def test_list_dependencies_rest(request_type): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dependencies), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListDependenciesResponse( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListDependenciesResponse( next_page_token="next_page_token_value", ) - response = client.list_dependencies(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = apihub_service.ListDependenciesRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListDependenciesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_dependencies(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListDependenciesPager) assert response.next_page_token == "next_page_token_value" -def test_list_dependencies_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dependencies), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_dependencies() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListDependenciesRequest() - - -def test_list_dependencies_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = apihub_service.ListDependenciesRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dependencies), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_dependencies(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListDependenciesRequest( - parent="parent_value", - filter="filter_value", - page_token="page_token_value", - ) - - -def test_list_dependencies_use_cached_wrapped_rpc(): +def test_list_dependencies_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -2857,6 +2474,7 @@ def test_list_dependencies_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.list_dependencies ] = mock_rpc + request = {} client.list_dependencies(request) @@ -2870,277 +2488,256 @@ def test_list_dependencies_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_dependencies_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", +def test_list_dependencies_rest_required_fields( + request_type=apihub_service.ListDependenciesRequest, +): + transport_class = transports.ApiHubDependenciesRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dependencies), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListDependenciesResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_dependencies() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == apihub_service.ListDependenciesRequest() + # verify fields with default values are dropped + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_dependencies._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) -@pytest.mark.asyncio -async def test_list_dependencies_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # verify required fields with default values are now present - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + jsonified_request["parent"] = "parent_value" - # Ensure 
method has been cached - assert ( - client._client._transport.list_dependencies - in client._client._transport._wrapped_methods + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_dependencies._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", ) + ) + jsonified_request.update(unset_fields) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.list_dependencies - ] = mock_rpc - - request = {} - await client.list_dependencies(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_dependencies(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -@pytest.mark.asyncio -async def test_list_dependencies_async( - transport: str = "grpc_asyncio", request_type=apihub_service.ListDependenciesRequest -): - client = ApiHubDependenciesAsyncClient( + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListDependenciesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dependencies), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListDependenciesResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_dependencies(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = apihub_service.ListDependenciesRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = apihub_service.ListDependenciesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDependenciesAsyncPager) - assert response.next_page_token == "next_page_token_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_dependencies(request) -@pytest.mark.asyncio -async def test_list_dependencies_async_from_dict(): - await test_list_dependencies_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_list_dependencies_field_headers(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_list_dependencies_rest_unset_required_fields(): + transport = transports.ApiHubDependenciesRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.ListDependenciesRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dependencies), "__call__" - ) as call: - call.return_value = apihub_service.ListDependenciesResponse() - client.list_dependencies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + unset_fields = transport.list_dependencies._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) -@pytest.mark.asyncio -async def test_list_dependencies_field_headers_async(): - client = ApiHubDependenciesAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_dependencies_rest_interceptors(null_interceptor): + transport = transports.ApiHubDependenciesRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ApiHubDependenciesRestInterceptor(), ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = apihub_service.ListDependenciesRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. + client = ApiHubDependenciesClient(transport=transport) with mock.patch.object( - type(client.transport.list_dependencies), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListDependenciesResponse() + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubDependenciesRestInterceptor, "post_list_dependencies" + ) as post, mock.patch.object( + transports.ApiHubDependenciesRestInterceptor, "pre_list_dependencies" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = apihub_service.ListDependenciesRequest.pb( + apihub_service.ListDependenciesRequest() ) - await client.list_dependencies(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = apihub_service.ListDependenciesResponse.to_json( + apihub_service.ListDependenciesResponse() + ) -def test_list_dependencies_flattened(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = apihub_service.ListDependenciesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = apihub_service.ListDependenciesResponse() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dependencies), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListDependenciesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.list_dependencies( - parent="parent_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_list_dependencies_flattened_error(): +def test_list_dependencies_rest_bad_request( + transport: str = "rest", request_type=apihub_service.ListDependenciesRequest +): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_dependencies( - apihub_service.ListDependenciesRequest(), - parent="parent_value", - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_dependencies(request) -@pytest.mark.asyncio -async def test_list_dependencies_flattened_async(): - client = ApiHubDependenciesAsyncClient( +def test_list_dependencies_rest_flattened(): + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dependencies), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = apihub_service.ListDependenciesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = apihub_service.ListDependenciesResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - apihub_service.ListDependenciesResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_dependencies( + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = apihub_service.ListDependenciesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_dependencies(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/dependencies" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_list_dependencies_flattened_error_async(): - client = ApiHubDependenciesAsyncClient( +def test_list_dependencies_rest_flattened_error(transport: str = "rest"): + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_dependencies( + client.list_dependencies( apihub_service.ListDependenciesRequest(), parent="parent_value", ) -def test_list_dependencies_pager(transport_name: str = "grpc"): +def test_list_dependencies_rest_pager(transport: str = "rest"): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dependencies), "__call__" - ) as call: - # Set the response to a series of pages. - call.side_effect = ( + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( apihub_service.ListDependenciesResponse( dependencies=[ common_fields.Dependency(), @@ -3165,3927 +2762,831 @@ def test_list_dependencies_pager(transport_name: str = "grpc"): common_fields.Dependency(), ], ), - RuntimeError, ) + # Two responses for two calls + response = response + response - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Wrap the values into proper Response objs + response = tuple( + apihub_service.ListDependenciesResponse.to_json(x) for x in response ) - pager = client.list_dependencies(request={}, retry=retry, timeout=timeout) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} - assert pager._metadata 
== expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + pager = client.list_dependencies(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, common_fields.Dependency) for i in results) + pages = list(client.list_dependencies(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + -def test_list_dependencies_pages(transport_name: str = "grpc"): - client = ApiHubDependenciesClient( +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ApiHubDependenciesRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, ) + with pytest.raises(ValueError): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dependencies), "__call__" - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.ListDependenciesResponse( - dependencies=[ - common_fields.Dependency(), - common_fields.Dependency(), - common_fields.Dependency(), - ], - next_page_token="abc", - ), - apihub_service.ListDependenciesResponse( - dependencies=[], - next_page_token="def", - ), - apihub_service.ListDependenciesResponse( - dependencies=[ - common_fields.Dependency(), - ], - next_page_token="ghi", - ), - apihub_service.ListDependenciesResponse( - dependencies=[ - common_fields.Dependency(), - common_fields.Dependency(), - ], - ), - RuntimeError, - ) - pages = list(client.list_dependencies(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_dependencies_async_pager(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dependencies), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.ListDependenciesResponse( - dependencies=[ - common_fields.Dependency(), - common_fields.Dependency(), - common_fields.Dependency(), - ], - next_page_token="abc", - ), - apihub_service.ListDependenciesResponse( - dependencies=[], - next_page_token="def", - ), - apihub_service.ListDependenciesResponse( - dependencies=[ - common_fields.Dependency(), - ], - next_page_token="ghi", - ), - apihub_service.ListDependenciesResponse( - dependencies=[ - common_fields.Dependency(), - common_fields.Dependency(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_dependencies( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, common_fields.Dependency) for i in responses) - - -@pytest.mark.asyncio -async def test_list_dependencies_async_pages(): - client = ApiHubDependenciesAsyncClient( + # It is an error to provide a credentials file and a transport instance. + transport = transports.ApiHubDependenciesRestTransport( credentials=ga_credentials.AnonymousCredentials(), ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dependencies), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - apihub_service.ListDependenciesResponse( - dependencies=[ - common_fields.Dependency(), - common_fields.Dependency(), - common_fields.Dependency(), - ], - next_page_token="abc", - ), - apihub_service.ListDependenciesResponse( - dependencies=[], - next_page_token="def", - ), - apihub_service.ListDependenciesResponse( - dependencies=[ - common_fields.Dependency(), - ], - next_page_token="ghi", - ), - apihub_service.ListDependenciesResponse( - dependencies=[ - common_fields.Dependency(), - common_fields.Dependency(), - ], - ), - RuntimeError, + with pytest.raises(ValueError): + client = ApiHubDependenciesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_dependencies(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.CreateDependencyRequest, - dict, - ], -) -def test_create_dependency_rest(request_type): - client = ApiHubDependenciesClient( + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.ApiHubDependenciesRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["dependency"] = { - "name": "name_value", - "consumer": { - "operation_resource_name": "operation_resource_name_value", - "external_api_resource_name": "external_api_resource_name_value", - "display_name": "display_name_value", - }, - "supplier": {}, - "state": 1, - "description": "description_value", - "discovery_mode": 1, - "error_detail": {"error": 1, "error_time": {"seconds": 751, "nanos": 543}}, - "create_time": {}, - "update_time": {}, - "attributes": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = apihub_service.CreateDependencyRequest.meta.fields["dependency"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["dependency"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["dependency"][field])): - del request_init["dependency"][field][i][subfield] - else: - del 
request_init["dependency"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Dependency( - name="name_value", - state=common_fields.Dependency.State.PROPOSED, - description="description_value", - discovery_mode=common_fields.Dependency.DiscoveryMode.MANUAL, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Dependency.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.create_dependency(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.Dependency) - assert response.name == "name_value" - assert response.state == common_fields.Dependency.State.PROPOSED - assert response.description == "description_value" - assert response.discovery_mode == common_fields.Dependency.DiscoveryMode.MANUAL - - -def test_create_dependency_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_dependency in 
client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.create_dependency - ] = mock_rpc - - request = {} - client.create_dependency(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_dependency(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_dependency_rest_required_fields( - request_type=apihub_service.CreateDependencyRequest, -): - transport_class = transports.ApiHubDependenciesRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_dependency._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_dependency._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("dependency_id",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Dependency() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Dependency.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.create_dependency(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_create_dependency_rest_unset_required_fields(): - transport = transports.ApiHubDependenciesRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.create_dependency._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("dependencyId",)) - & set( - ( - "parent", - "dependency", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_dependency_rest_interceptors(null_interceptor): - transport = transports.ApiHubDependenciesRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ApiHubDependenciesRestInterceptor(), - ) - client = ApiHubDependenciesClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubDependenciesRestInterceptor, "post_create_dependency" - ) as post, mock.patch.object( - transports.ApiHubDependenciesRestInterceptor, "pre_create_dependency" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.CreateDependencyRequest.pb( - 
apihub_service.CreateDependencyRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Dependency.to_json( - common_fields.Dependency() - ) - - request = apihub_service.CreateDependencyRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Dependency() - - client.create_dependency( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_dependency_rest_bad_request( - transport: str = "rest", request_type=apihub_service.CreateDependencyRequest -): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_dependency(request) - - -def test_create_dependency_rest_flattened(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = common_fields.Dependency() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - dependency=common_fields.Dependency(name="name_value"), - dependency_id="dependency_id_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Dependency.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.create_dependency(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/dependencies" - % client.transport._host, - args[1], - ) - - -def test_create_dependency_rest_flattened_error(transport: str = "rest"): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_dependency( - apihub_service.CreateDependencyRequest(), - parent="parent_value", - dependency=common_fields.Dependency(name="name_value"), - dependency_id="dependency_id_value", - ) - - -def test_create_dependency_rest_error(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.GetDependencyRequest, - dict, - ], -) -def test_get_dependency_rest(request_type): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/dependencies/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Dependency( - name="name_value", - state=common_fields.Dependency.State.PROPOSED, - description="description_value", - discovery_mode=common_fields.Dependency.DiscoveryMode.MANUAL, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Dependency.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_dependency(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.Dependency) - assert response.name == "name_value" - assert response.state == common_fields.Dependency.State.PROPOSED - assert response.description == "description_value" - assert response.discovery_mode == common_fields.Dependency.DiscoveryMode.MANUAL - - -def test_get_dependency_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_dependency in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.get_dependency] = mock_rpc - - request = {} - client.get_dependency(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_dependency(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_dependency_rest_required_fields( - request_type=apihub_service.GetDependencyRequest, -): - transport_class = transports.ApiHubDependenciesRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_dependency._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_dependency._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Dependency() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Dependency.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_dependency(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_dependency_rest_unset_required_fields(): - transport = transports.ApiHubDependenciesRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_dependency._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_dependency_rest_interceptors(null_interceptor): - transport = transports.ApiHubDependenciesRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ApiHubDependenciesRestInterceptor(), - ) - client = ApiHubDependenciesClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubDependenciesRestInterceptor, "post_get_dependency" - ) as post, mock.patch.object( - transports.ApiHubDependenciesRestInterceptor, "pre_get_dependency" - ) as pre: - pre.assert_not_called() - post.assert_not_called() 
- pb_message = apihub_service.GetDependencyRequest.pb( - apihub_service.GetDependencyRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Dependency.to_json( - common_fields.Dependency() - ) - - request = apihub_service.GetDependencyRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Dependency() - - client.get_dependency( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_dependency_rest_bad_request( - transport: str = "rest", request_type=apihub_service.GetDependencyRequest -): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/dependencies/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_dependency(request) - - -def test_get_dependency_rest_flattened(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = common_fields.Dependency() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/dependencies/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Dependency.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_dependency(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/dependencies/*}" - % client.transport._host, - args[1], - ) - - -def test_get_dependency_rest_flattened_error(transport: str = "rest"): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_dependency( - apihub_service.GetDependencyRequest(), - name="name_value", - ) - - -def test_get_dependency_rest_error(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.UpdateDependencyRequest, - dict, - ], -) -def test_update_dependency_rest(request_type): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "dependency": { - "name": "projects/sample1/locations/sample2/dependencies/sample3" - } - } - request_init["dependency"] = { - "name": "projects/sample1/locations/sample2/dependencies/sample3", - "consumer": { - "operation_resource_name": "operation_resource_name_value", - "external_api_resource_name": "external_api_resource_name_value", - "display_name": "display_name_value", - }, - "supplier": {}, - "state": 1, - "description": "description_value", - "discovery_mode": 1, - "error_detail": {"error": 1, "error_time": {"seconds": 751, "nanos": 543}}, - "create_time": {}, - "update_time": {}, - "attributes": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = apihub_service.UpdateDependencyRequest.meta.fields["dependency"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["dependency"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["dependency"][field])): - del request_init["dependency"][field][i][subfield] - else: - del 
request_init["dependency"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Dependency( - name="name_value", - state=common_fields.Dependency.State.PROPOSED, - description="description_value", - discovery_mode=common_fields.Dependency.DiscoveryMode.MANUAL, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Dependency.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.update_dependency(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, common_fields.Dependency) - assert response.name == "name_value" - assert response.state == common_fields.Dependency.State.PROPOSED - assert response.description == "description_value" - assert response.discovery_mode == common_fields.Dependency.DiscoveryMode.MANUAL - - -def test_update_dependency_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_dependency in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - 
mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.update_dependency - ] = mock_rpc - - request = {} - client.update_dependency(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_dependency(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_dependency_rest_required_fields( - request_type=apihub_service.UpdateDependencyRequest, -): - transport_class = transports.ApiHubDependenciesRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_dependency._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_dependency._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.Dependency() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "patch", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.Dependency.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.update_dependency(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_update_dependency_rest_unset_required_fields(): - transport = transports.ApiHubDependenciesRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.update_dependency._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("updateMask",)) - & set( - ( - "dependency", - "updateMask", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_dependency_rest_interceptors(null_interceptor): - transport = transports.ApiHubDependenciesRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ApiHubDependenciesRestInterceptor(), - ) - client = ApiHubDependenciesClient(transport=transport) - with mock.patch.object( - type(client.transport._session), 
"request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubDependenciesRestInterceptor, "post_update_dependency" - ) as post, mock.patch.object( - transports.ApiHubDependenciesRestInterceptor, "pre_update_dependency" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.UpdateDependencyRequest.pb( - apihub_service.UpdateDependencyRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.Dependency.to_json( - common_fields.Dependency() - ) - - request = apihub_service.UpdateDependencyRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.Dependency() - - client.update_dependency( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_dependency_rest_bad_request( - transport: str = "rest", request_type=apihub_service.UpdateDependencyRequest -): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "dependency": { - "name": "projects/sample1/locations/sample2/dependencies/sample3" - } - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_dependency(request) - - -def test_update_dependency_rest_flattened(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.Dependency() - - # get arguments that satisfy an http rule for this method - sample_request = { - "dependency": { - "name": "projects/sample1/locations/sample2/dependencies/sample3" - } - } - - # get truthy value for each flattened field - mock_args = dict( - dependency=common_fields.Dependency(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.Dependency.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.update_dependency(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{dependency.name=projects/*/locations/*/dependencies/*}" - % client.transport._host, - args[1], - ) - - -def test_update_dependency_rest_flattened_error(transport: str = "rest"): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_dependency( - apihub_service.UpdateDependencyRequest(), - dependency=common_fields.Dependency(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - -def test_update_dependency_rest_error(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.DeleteDependencyRequest, - dict, - ], -) -def test_delete_dependency_rest(request_type): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/dependencies/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_dependency(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_dependency_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_dependency in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.delete_dependency - ] = mock_rpc - - request = {} - client.delete_dependency(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_dependency(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_dependency_rest_required_fields( - request_type=apihub_service.DeleteDependencyRequest, -): - transport_class = transports.ApiHubDependenciesRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_dependency._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_dependency._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_dependency(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_delete_dependency_rest_unset_required_fields(): - transport = transports.ApiHubDependenciesRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.delete_dependency._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_dependency_rest_interceptors(null_interceptor): - transport = transports.ApiHubDependenciesRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ApiHubDependenciesRestInterceptor(), - ) - client = ApiHubDependenciesClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubDependenciesRestInterceptor, "pre_delete_dependency" - ) as pre: - pre.assert_not_called() - pb_message = apihub_service.DeleteDependencyRequest.pb( - apihub_service.DeleteDependencyRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = 
Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = apihub_service.DeleteDependencyRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_dependency( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - - -def test_delete_dependency_rest_bad_request( - transport: str = "rest", request_type=apihub_service.DeleteDependencyRequest -): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/dependencies/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_dependency(request) - - -def test_delete_dependency_rest_flattened(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/dependencies/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.delete_dependency(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/dependencies/*}" - % client.transport._host, - args[1], - ) - - -def test_delete_dependency_rest_flattened_error(transport: str = "rest"): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_dependency( - apihub_service.DeleteDependencyRequest(), - name="name_value", - ) - - -def test_delete_dependency_rest_error(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - apihub_service.ListDependenciesRequest, - dict, - ], -) -def test_list_dependencies_rest(request_type): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListDependenciesResponse( - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListDependenciesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_dependencies(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDependenciesPager) - assert response.next_page_token == "next_page_token_value" - - -def test_list_dependencies_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_dependencies in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.list_dependencies - ] = mock_rpc - - request = {} - client.list_dependencies(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_dependencies(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_dependencies_rest_required_fields( - request_type=apihub_service.ListDependenciesRequest, -): - transport_class = transports.ApiHubDependenciesRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_dependencies._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_dependencies._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListDependenciesResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = apihub_service.ListDependenciesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_dependencies(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_dependencies_rest_unset_required_fields(): - transport = transports.ApiHubDependenciesRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_dependencies._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_dependencies_rest_interceptors(null_interceptor): - transport = transports.ApiHubDependenciesRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ApiHubDependenciesRestInterceptor(), - ) - client = ApiHubDependenciesClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, 
mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubDependenciesRestInterceptor, "post_list_dependencies" - ) as post, mock.patch.object( - transports.ApiHubDependenciesRestInterceptor, "pre_list_dependencies" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = apihub_service.ListDependenciesRequest.pb( - apihub_service.ListDependenciesRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = apihub_service.ListDependenciesResponse.to_json( - apihub_service.ListDependenciesResponse() - ) - - request = apihub_service.ListDependenciesRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = apihub_service.ListDependenciesResponse() - - client.list_dependencies( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_dependencies_rest_bad_request( - transport: str = "rest", request_type=apihub_service.ListDependenciesRequest -): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_dependencies(request) - - -def test_list_dependencies_rest_flattened(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = apihub_service.ListDependenciesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = apihub_service.ListDependenciesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_dependencies(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/dependencies" - % client.transport._host, - args[1], - ) - - -def test_list_dependencies_rest_flattened_error(transport: str = "rest"): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_dependencies( - apihub_service.ListDependenciesRequest(), - parent="parent_value", - ) - - -def test_list_dependencies_rest_pager(transport: str = "rest"): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - apihub_service.ListDependenciesResponse( - dependencies=[ - common_fields.Dependency(), - common_fields.Dependency(), - common_fields.Dependency(), - ], - next_page_token="abc", - ), - apihub_service.ListDependenciesResponse( - dependencies=[], - next_page_token="def", - ), - apihub_service.ListDependenciesResponse( - dependencies=[ - common_fields.Dependency(), - ], - next_page_token="ghi", - ), - apihub_service.ListDependenciesResponse( - dependencies=[ - common_fields.Dependency(), - common_fields.Dependency(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - apihub_service.ListDependenciesResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_dependencies(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, common_fields.Dependency) for i in results) - - pages = list(client.list_dependencies(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.ApiHubDependenciesGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. 
- transport = transports.ApiHubDependenciesGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ApiHubDependenciesClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.ApiHubDependenciesGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ApiHubDependenciesClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ApiHubDependenciesClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.ApiHubDependenciesGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ApiHubDependenciesClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.ApiHubDependenciesGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = ApiHubDependenciesClient(transport=transport) - assert client.transport is transport - - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.ApiHubDependenciesGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.ApiHubDependenciesGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ApiHubDependenciesGrpcTransport, - transports.ApiHubDependenciesGrpcAsyncIOTransport, - transports.ApiHubDependenciesRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "rest", - ], -) -def test_transport_kind(transport_name): - transport = ApiHubDependenciesClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.ApiHubDependenciesGrpcTransport, - ) - - -def test_api_hub_dependencies_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.ApiHubDependenciesTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", - ) - - -def test_api_hub_dependencies_base_transport(): - # Instantiate the base transport. 
- with mock.patch( - "google.cloud.apihub_v1.services.api_hub_dependencies.transports.ApiHubDependenciesTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.ApiHubDependenciesTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - "create_dependency", - "get_dependency", - "update_dependency", - "delete_dependency", - "list_dependencies", - "get_location", - "list_locations", - "get_operation", - "cancel_operation", - "delete_operation", - "list_operations", - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - "kind", - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_api_hub_dependencies_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.apihub_v1.services.api_hub_dependencies.transports.ApiHubDependenciesTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ApiHubDependenciesTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -def test_api_hub_dependencies_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.cloud.apihub_v1.services.api_hub_dependencies.transports.ApiHubDependenciesTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ApiHubDependenciesTransport() - adc.assert_called_once() - - -def test_api_hub_dependencies_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - ApiHubDependenciesClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ApiHubDependenciesGrpcTransport, - transports.ApiHubDependenciesGrpcAsyncIOTransport, - ], -) -def test_api_hub_dependencies_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ApiHubDependenciesGrpcTransport, - transports.ApiHubDependenciesGrpcAsyncIOTransport, - transports.ApiHubDependenciesRestTransport, - ], -) -def test_api_hub_dependencies_transport_auth_gdch_credentials(transport_class): - host = "https://language.com" - api_audience_tests = [None, "https://language2.com"] - api_audience_expect = [host, "https://language2.com"] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, "default", autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock( - return_value=gdch_mock - ) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with(e) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.ApiHubDependenciesGrpcTransport, grpc_helpers), - (transports.ApiHubDependenciesGrpcAsyncIOTransport, grpc_helpers_async), - ], -) -def test_api_hub_dependencies_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - - create_channel.assert_called_with( - "apihub.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=["1", "2"], - default_host="apihub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ApiHubDependenciesGrpcTransport, - transports.ApiHubDependenciesGrpcAsyncIOTransport, - ], -) -def test_api_hub_dependencies_grpc_transport_client_cert_source_for_mtls( - transport_class, -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds, - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback, - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, private_key=expected_key - ) - - -def test_api_hub_dependencies_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch( - "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" - ) as mock_configure_mtls_channel: - transports.ApiHubDependenciesRestTransport( - credentials=cred, client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_api_hub_dependencies_host_no_port(transport_name): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="apihub.googleapis.com" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "apihub.googleapis.com:443" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://apihub.googleapis.com" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_api_hub_dependencies_host_with_port(transport_name): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="apihub.googleapis.com:8000" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "apihub.googleapis.com:8000" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://apihub.googleapis.com:8000" - ) - - 
-@pytest.mark.parametrize( - "transport_name", - [ - "rest", - ], -) -def test_api_hub_dependencies_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = ApiHubDependenciesClient( - credentials=creds1, - transport=transport_name, - ) - client2 = ApiHubDependenciesClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.create_dependency._session - session2 = client2.transport.create_dependency._session - assert session1 != session2 - session1 = client1.transport.get_dependency._session - session2 = client2.transport.get_dependency._session - assert session1 != session2 - session1 = client1.transport.update_dependency._session - session2 = client2.transport.update_dependency._session - assert session1 != session2 - session1 = client1.transport.delete_dependency._session - session2 = client2.transport.delete_dependency._session - assert session1 != session2 - session1 = client1.transport.list_dependencies._session - session2 = client2.transport.list_dependencies._session - assert session1 != session2 - - -def test_api_hub_dependencies_grpc_transport_channel(): - channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.ApiHubDependenciesGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_api_hub_dependencies_grpc_asyncio_transport_channel(): - channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.ApiHubDependenciesGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize( - "transport_class", - [ - transports.ApiHubDependenciesGrpcTransport, - transports.ApiHubDependenciesGrpcAsyncIOTransport, - ], -) -def test_api_hub_dependencies_transport_channel_mtls_with_client_cert_source( - transport_class, -): - with mock.patch( - "grpc.ssl_channel_credentials", autospec=True - ) as grpc_ssl_channel_cred: - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, 
client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize( - "transport_class", - [ - transports.ApiHubDependenciesGrpcTransport, - transports.ApiHubDependenciesGrpcAsyncIOTransport, - ], -) -def test_api_hub_dependencies_transport_channel_mtls_with_adc(transport_class): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_attribute_path(): - project = "squid" - location = "clam" - attribute = "whelk" - expected = "projects/{project}/locations/{location}/attributes/{attribute}".format( - project=project, - location=location, - attribute=attribute, - ) - actual = ApiHubDependenciesClient.attribute_path(project, location, attribute) - assert expected == actual - - -def test_parse_attribute_path(): - expected = { - "project": "octopus", - "location": "oyster", - "attribute": "nudibranch", - } - path = ApiHubDependenciesClient.attribute_path(**expected) - - # Check that the path construction is reversible. 
- actual = ApiHubDependenciesClient.parse_attribute_path(path) - assert expected == actual - - -def test_dependency_path(): - project = "cuttlefish" - location = "mussel" - dependency = "winkle" - expected = ( - "projects/{project}/locations/{location}/dependencies/{dependency}".format( - project=project, - location=location, - dependency=dependency, - ) - ) - actual = ApiHubDependenciesClient.dependency_path(project, location, dependency) - assert expected == actual - - -def test_parse_dependency_path(): - expected = { - "project": "nautilus", - "location": "scallop", - "dependency": "abalone", - } - path = ApiHubDependenciesClient.dependency_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubDependenciesClient.parse_dependency_path(path) - assert expected == actual - - -def test_common_billing_account_path(): - billing_account = "squid" - expected = "billingAccounts/{billing_account}".format( - billing_account=billing_account, - ) - actual = ApiHubDependenciesClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "clam", - } - path = ApiHubDependenciesClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubDependenciesClient.parse_common_billing_account_path(path) - assert expected == actual - - -def test_common_folder_path(): - folder = "whelk" - expected = "folders/{folder}".format( - folder=folder, - ) - actual = ApiHubDependenciesClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "octopus", - } - path = ApiHubDependenciesClient.common_folder_path(**expected) - - # Check that the path construction is reversible. 
- actual = ApiHubDependenciesClient.parse_common_folder_path(path) - assert expected == actual - - -def test_common_organization_path(): - organization = "oyster" - expected = "organizations/{organization}".format( - organization=organization, - ) - actual = ApiHubDependenciesClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nudibranch", - } - path = ApiHubDependenciesClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubDependenciesClient.parse_common_organization_path(path) - assert expected == actual - - -def test_common_project_path(): - project = "cuttlefish" - expected = "projects/{project}".format( - project=project, - ) - actual = ApiHubDependenciesClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "mussel", - } - path = ApiHubDependenciesClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubDependenciesClient.parse_common_project_path(path) - assert expected == actual - - -def test_common_location_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, - ) - actual = ApiHubDependenciesClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = ApiHubDependenciesClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = ApiHubDependenciesClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object( - transports.ApiHubDependenciesTransport, "_prep_wrapped_messages" - ) as prep: - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object( - transports.ApiHubDependenciesTransport, "_prep_wrapped_messages" - ) as prep: - transport_class = ApiHubDependenciesClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_get_location_rest_bad_request( - transport: str = "rest", request_type=locations_pb2.GetLocationRequest -): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_location(request) - - -@pytest.mark.parametrize( - "request_type", - [ - locations_pb2.GetLocationRequest, - dict, - ], -) -def test_get_location_rest(request_type): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.Location() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_location(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -def test_list_locations_rest_bad_request( - transport: str = "rest", request_type=locations_pb2.ListLocationsRequest -): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({"name": "projects/sample1"}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_locations(request) - - -@pytest.mark.parametrize( - "request_type", - [ - locations_pb2.ListLocationsRequest, - dict, - ], -) -def test_list_locations_rest(request_type): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.ListLocationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_locations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - - -def test_cancel_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.CancelOperationRequest -): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.CancelOperationRequest, - dict, - ], -) -def test_cancel_operation_rest(request_type): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest -): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.DeleteOperationRequest, - dict, - ], -) -def test_delete_operation_rest(request_type): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_get_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.GetOperationRequest -): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.GetOperationRequest, - dict, - ], -) -def test_get_operation_rest(request_type): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_list_operations_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.ListOperationsRequest -): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_operations(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.ListOperationsRequest, - dict, - ], -) -def test_list_operations_rest(request_type): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + client_options=options, + transport=transport, + ) - response = client.list_operations(request) + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ApiHubDependenciesClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) + # It is an error to provide scopes and a transport instance. 
+ transport = transports.ApiHubDependenciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ApiHubDependenciesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) -def test_delete_operation(transport: str = "grpc"): - client = ApiHubDependenciesClient( +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ApiHubDependenciesRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) + client = ApiHubDependenciesClient(transport=transport) + assert client.transport is transport - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the response is the type that we expect. - assert response is None +@pytest.mark.parametrize( + "transport_class", + [ + transports.ApiHubDependenciesRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = ApiHubDependenciesAsyncClient( +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_transport_kind(transport_name): + transport = ApiHubDependenciesClient.get_transport_class(transport_name)( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) + assert transport.kind == transport_name - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_api_hub_dependencies_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ApiHubDependenciesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) - # Establish that the response is the type that we expect. - assert response is None +def test_api_hub_dependencies_base_transport(): + # Instantiate the base transport. 
+ with mock.patch( + "google.cloud.apihub_v1.services.api_hub_dependencies.transports.ApiHubDependenciesTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ApiHubDependenciesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) -def test_delete_operation_field_headers(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "create_dependency", + "get_dependency", + "update_dependency", + "delete_dependency", + "list_dependencies", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + with pytest.raises(NotImplementedError): + transport.close() + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" +def test_api_hub_dependencies_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.apihub_v1.services.api_hub_dependencies.transports.ApiHubDependenciesTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ApiHubDependenciesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] +def test_api_hub_dependencies_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.apihub_v1.services.api_hub_dependencies.transports.ApiHubDependenciesTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ApiHubDependenciesTransport() + adc.assert_called_once() -def test_delete_operation_from_dict(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } +def test_api_hub_dependencies_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ApiHubDependenciesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation( - request={ - "name": "locations", - } +def test_api_hub_dependencies_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.ApiHubDependenciesRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback ) - call.assert_called() + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) -def test_cancel_operation(transport: str = "grpc"): +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_api_hub_dependencies_host_no_port(transport_name): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "apihub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com" ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = ApiHubDependenciesAsyncClient( +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_api_hub_dependencies_host_with_port(transport_name): + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "apihub.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com:8000" ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the response is the type that we expect. 
- assert response is None +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_api_hub_dependencies_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ApiHubDependenciesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ApiHubDependenciesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_dependency._session + session2 = client2.transport.create_dependency._session + assert session1 != session2 + session1 = client1.transport.get_dependency._session + session2 = client2.transport.get_dependency._session + assert session1 != session2 + session1 = client1.transport.update_dependency._session + session2 = client2.transport.update_dependency._session + assert session1 != session2 + session1 = client1.transport.delete_dependency._session + session2 = client2.transport.delete_dependency._session + assert session1 != session2 + session1 = client1.transport.list_dependencies._session + session2 = client2.transport.list_dependencies._session + assert session1 != session2 -def test_cancel_operation_field_headers(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_attribute_path(): + project = "squid" + location = "clam" + attribute = "whelk" + expected = "projects/{project}/locations/{location}/attributes/{attribute}".format( + project=project, + location=location, + attribute=attribute, ) + actual = ApiHubDependenciesClient.attribute_path(project, location, attribute) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_attribute_path(): + expected = { + "project": "octopus", + "location": "oyster", + "attribute": "nudibranch", + } + path = ApiHubDependenciesClient.attribute_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = ApiHubDependenciesClient.parse_attribute_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_dependency_path(): + project = "cuttlefish" + location = "mussel" + dependency = "winkle" + expected = ( + "projects/{project}/locations/{location}/dependencies/{dependency}".format( + project=project, + location=location, + dependency=dependency, + ) ) + actual = ApiHubDependenciesClient.dependency_path(project, location, dependency) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_dependency_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "dependency": "abalone", + } + path = ApiHubDependenciesClient.dependency_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = ApiHubDependenciesClient.parse_dependency_path(path) + assert expected == actual -def test_cancel_operation_from_dict(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() + actual = ApiHubDependenciesClient.common_billing_account_path(billing_account) + assert expected == actual -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = ApiHubDependenciesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = ApiHubDependenciesClient.parse_common_billing_account_path(path) + assert expected == actual -def test_get_operation(transport: str = "grpc"): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, ) + actual = ApiHubDependenciesClient.common_folder_path(folder) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = ApiHubDependenciesClient.common_folder_path(**expected) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) + # Check that the path construction is reversible. 
+ actual = ApiHubDependenciesClient.parse_common_folder_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, ) + actual = ApiHubDependenciesClient.common_organization_path(organization) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = ApiHubDependenciesClient.common_organization_path(**expected) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) + # Check that the path construction is reversible. 
+ actual = ApiHubDependenciesClient.parse_common_organization_path(path) + assert expected == actual -def test_get_operation_field_headers(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, ) + actual = ApiHubDependenciesClient.common_project_path(project) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = ApiHubDependenciesClient.common_project_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. 
+ actual = ApiHubDependenciesClient.parse_common_project_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, ) + actual = ApiHubDependenciesClient.common_location_path(project, location) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = ApiHubDependenciesClient.common_location_path(**expected) + # Check that the path construction is reversible. + actual = ApiHubDependenciesClient.parse_common_location_path(path) + assert expected == actual -def test_get_operation_from_dict(): - client = ApiHubDependenciesClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() + with mock.patch.object( + transports.ApiHubDependenciesTransport, "_prep_wrapped_messages" + ) as prep: + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) - response = await client.get_operation( - request={ - "name": "locations", - } + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ApiHubDependenciesTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ApiHubDependenciesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) - call.assert_called() + prep.assert_called_once_with(client_info) -def test_list_operations(transport: str = "grpc"): +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - - -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) -def test_list_operations_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.get_location(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) -def test_list_operations_from_dict(): +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) -def test_list_locations(transport: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, locations_pb2.ListLocationsResponse) -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc_asyncio"): - client = ApiHubDependenciesAsyncClient( +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) -def test_list_locations_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() - - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.cancel_operation(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Establish that the response is the type that we expect. 
+ assert response is None -def test_list_locations_from_dict(): +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) -def test_get_location(transport: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_operation(request) # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) + assert response is None -@pytest.mark.asyncio -async def test_get_location_async(transport: str = "grpc_asyncio"): - client = ApiHubDependenciesAsyncClient( +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) -def test_get_location_field_headers(): - client = ApiHubDependenciesClient(credentials=ga_credentials.AnonymousCredentials()) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = locations_pb2.Location() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - client.get_location(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.get_operation(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations/abc", - ) in kw["metadata"] + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_location_field_headers_async(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials() +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = ApiHubDependenciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations/abc", - ) in kw["metadata"] + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) -def test_get_location_from_dict(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): client = ApiHubDependenciesClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - - response = client.get_location( - request={ - "name": "locations/abc", - } - ) - call.assert_called() + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -@pytest.mark.asyncio -async def test_get_location_from_dict_async(): - client = ApiHubDependenciesAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location( - request={ - "name": "locations", - } - ) - call.assert_called() + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) def test_transport_close(): transports = { "rest": "_session", - "grpc": "_grpc_channel", } for transport, close_name in transports.items(): @@ -7103,7 +3604,6 @@ def test_transport_close(): def test_client_ctx(): transports = [ "rest", - "grpc", ] for transport in transports: client = ApiHubDependenciesClient( @@ -7120,11 +3620,7 @@ def test_client_ctx(): @pytest.mark.parametrize( "client_class,transport_class", [ - (ApiHubDependenciesClient, transports.ApiHubDependenciesGrpcTransport), - ( - ApiHubDependenciesAsyncClient, - transports.ApiHubDependenciesGrpcAsyncIOTransport, - ), + (ApiHubDependenciesClient, transports.ApiHubDependenciesRestTransport), ], ) def test_api_key_credentials(client_class, transport_class): diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_plugin.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_plugin.py index 97adab5ecf39..dc92c3df1475 100644 --- a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_plugin.py +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_plugin.py @@ -46,7 +46,6 @@ from requests.sessions import Session from google.cloud.apihub_v1.services.api_hub_plugin import ( - ApiHubPluginAsyncClient, ApiHubPluginClient, transports, ) @@ -195,11 +194,6 @@ def test__get_client_cert_source(): "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ApiHubPluginClient), ) -@mock.patch.object( - ApiHubPluginAsyncClient, - 
"_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ApiHubPluginAsyncClient), -) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() @@ -284,7 +278,6 @@ def test__get_universe_domain(): @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (ApiHubPluginClient, transports.ApiHubPluginGrpcTransport, "grpc"), (ApiHubPluginClient, transports.ApiHubPluginRestTransport, "rest"), ], ) @@ -364,8 +357,6 @@ def test__validate_universe_domain(client_class, transport_class, transport_name @pytest.mark.parametrize( "client_class,transport_name", [ - (ApiHubPluginClient, "grpc"), - (ApiHubPluginAsyncClient, "grpc_asyncio"), (ApiHubPluginClient, "rest"), ], ) @@ -390,8 +381,6 @@ def test_api_hub_plugin_client_from_service_account_info(client_class, transport @pytest.mark.parametrize( "transport_class,transport_name", [ - (transports.ApiHubPluginGrpcTransport, "grpc"), - (transports.ApiHubPluginGrpcAsyncIOTransport, "grpc_asyncio"), (transports.ApiHubPluginRestTransport, "rest"), ], ) @@ -416,8 +405,6 @@ def test_api_hub_plugin_client_service_account_always_use_jwt( @pytest.mark.parametrize( "client_class,transport_name", [ - (ApiHubPluginClient, "grpc"), - (ApiHubPluginAsyncClient, "grpc_asyncio"), (ApiHubPluginClient, "rest"), ], ) @@ -449,24 +436,17 @@ def test_api_hub_plugin_client_from_service_account_file(client_class, transport def test_api_hub_plugin_client_get_transport_class(): transport = ApiHubPluginClient.get_transport_class() available_transports = [ - transports.ApiHubPluginGrpcTransport, transports.ApiHubPluginRestTransport, ] assert transport in available_transports - transport = ApiHubPluginClient.get_transport_class("grpc") - assert transport == transports.ApiHubPluginGrpcTransport + transport = ApiHubPluginClient.get_transport_class("rest") + assert transport == transports.ApiHubPluginRestTransport @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - 
(ApiHubPluginClient, transports.ApiHubPluginGrpcTransport, "grpc"), - ( - ApiHubPluginAsyncClient, - transports.ApiHubPluginGrpcAsyncIOTransport, - "grpc_asyncio", - ), (ApiHubPluginClient, transports.ApiHubPluginRestTransport, "rest"), ], ) @@ -475,11 +455,6 @@ def test_api_hub_plugin_client_get_transport_class(): "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ApiHubPluginClient), ) -@mock.patch.object( - ApiHubPluginAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ApiHubPluginAsyncClient), -) def test_api_hub_plugin_client_client_options( client_class, transport_class, transport_name ): @@ -613,20 +588,6 @@ def test_api_hub_plugin_client_client_options( @pytest.mark.parametrize( "client_class,transport_class,transport_name,use_client_cert_env", [ - (ApiHubPluginClient, transports.ApiHubPluginGrpcTransport, "grpc", "true"), - ( - ApiHubPluginAsyncClient, - transports.ApiHubPluginGrpcAsyncIOTransport, - "grpc_asyncio", - "true", - ), - (ApiHubPluginClient, transports.ApiHubPluginGrpcTransport, "grpc", "false"), - ( - ApiHubPluginAsyncClient, - transports.ApiHubPluginGrpcAsyncIOTransport, - "grpc_asyncio", - "false", - ), (ApiHubPluginClient, transports.ApiHubPluginRestTransport, "rest", "true"), (ApiHubPluginClient, transports.ApiHubPluginRestTransport, "rest", "false"), ], @@ -636,11 +597,6 @@ def test_api_hub_plugin_client_client_options( "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ApiHubPluginClient), ) -@mock.patch.object( - ApiHubPluginAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ApiHubPluginAsyncClient), -) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_api_hub_plugin_client_mtls_env_auto( client_class, transport_class, transport_name, use_client_cert_env @@ -744,15 +700,10 @@ def test_api_hub_plugin_client_mtls_env_auto( ) -@pytest.mark.parametrize("client_class", [ApiHubPluginClient, ApiHubPluginAsyncClient]) 
+@pytest.mark.parametrize("client_class", [ApiHubPluginClient]) @mock.patch.object( ApiHubPluginClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ApiHubPluginClient) ) -@mock.patch.object( - ApiHubPluginAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(ApiHubPluginAsyncClient), -) def test_api_hub_plugin_client_get_mtls_endpoint_and_cert_source(client_class): mock_client_cert_source = mock.Mock() @@ -844,17 +795,12 @@ def test_api_hub_plugin_client_get_mtls_endpoint_and_cert_source(client_class): ) -@pytest.mark.parametrize("client_class", [ApiHubPluginClient, ApiHubPluginAsyncClient]) +@pytest.mark.parametrize("client_class", [ApiHubPluginClient]) @mock.patch.object( ApiHubPluginClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ApiHubPluginClient), ) -@mock.patch.object( - ApiHubPluginAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ApiHubPluginAsyncClient), -) def test_api_hub_plugin_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback api_override = "foo.com" @@ -931,12 +877,6 @@ def test_api_hub_plugin_client_client_api_endpoint(client_class): @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (ApiHubPluginClient, transports.ApiHubPluginGrpcTransport, "grpc"), - ( - ApiHubPluginAsyncClient, - transports.ApiHubPluginGrpcAsyncIOTransport, - "grpc_asyncio", - ), (ApiHubPluginClient, transports.ApiHubPluginRestTransport, "rest"), ], ) @@ -968,18 +908,6 @@ def test_api_hub_plugin_client_client_options_scopes( @pytest.mark.parametrize( "client_class,transport_class,transport_name,grpc_helpers", [ - ( - ApiHubPluginClient, - transports.ApiHubPluginGrpcTransport, - "grpc", - grpc_helpers, - ), - ( - ApiHubPluginAsyncClient, - transports.ApiHubPluginGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), (ApiHubPluginClient, transports.ApiHubPluginRestTransport, "rest", None), ], ) @@ -1007,94 +935,6 @@ def 
test_api_hub_plugin_client_client_options_credentials_file( ) -def test_api_hub_plugin_client_client_options_from_dict(): - with mock.patch( - "google.cloud.apihub_v1.services.api_hub_plugin.transports.ApiHubPluginGrpcTransport.__init__" - ) as grpc_transport: - grpc_transport.return_value = None - client = ApiHubPluginClient(client_options={"api_endpoint": "squid.clam.whelk"}) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name,grpc_helpers", - [ - ( - ApiHubPluginClient, - transports.ApiHubPluginGrpcTransport, - "grpc", - grpc_helpers, - ), - ( - ApiHubPluginAsyncClient, - transports.ApiHubPluginGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), - ], -) -def test_api_hub_plugin_client_create_channel_credentials_file( - client_class, transport_class, transport_name, grpc_helpers -): - # Check the case credentials file is provided. - options = client_options.ClientOptions(credentials_file="credentials.json") - - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "apihub.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=None, - default_host="apihub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - @pytest.mark.parametrize( "request_type", [ @@ -1102,32 +942,36 @@ def test_api_hub_plugin_client_create_channel_credentials_file( dict, ], ) -def test_get_plugin(request_type, transport: str = "grpc"): +def test_get_plugin_rest(request_type): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/plugins/sample3"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_plugin), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = plugin_service.Plugin( + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = plugin_service.Plugin( name="name_value", display_name="display_name_value", description="description_value", state=plugin_service.Plugin.State.ENABLED, ) - response = client.get_plugin(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = plugin_service.GetPluginRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = plugin_service.Plugin.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_plugin(request) # Establish that the response is the type that we expect. assert isinstance(response, plugin_service.Plugin) @@ -1137,60 +981,13 @@ def test_get_plugin(request_type, transport: str = "grpc"): assert response.state == plugin_service.Plugin.State.ENABLED -def test_get_plugin_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_plugin), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.get_plugin() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == plugin_service.GetPluginRequest() - - -def test_get_plugin_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = plugin_service.GetPluginRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_plugin), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_plugin(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == plugin_service.GetPluginRequest( - name="name_value", - ) - - -def test_get_plugin_use_cached_wrapped_rpc(): +def test_get_plugin_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1206,6 +1003,7 @@ def test_get_plugin_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[client._transport.get_plugin] = mock_rpc + request = {} client.get_plugin(request) @@ -1219,260 +1017,230 @@ def test_get_plugin_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_plugin_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_plugin), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - plugin_service.Plugin( - name="name_value", - display_name="display_name_value", - description="description_value", - state=plugin_service.Plugin.State.ENABLED, - ) - ) - response = await client.get_plugin() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == plugin_service.GetPluginRequest() - - -@pytest.mark.asyncio -async def test_get_plugin_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.get_plugin - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - 
client._client._transport.get_plugin - ] = mock_rpc - - request = {} - await client.get_plugin(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_plugin(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - +def test_get_plugin_rest_required_fields(request_type=plugin_service.GetPluginRequest): + transport_class = transports.ApiHubPluginRestTransport -@pytest.mark.asyncio -async def test_get_plugin_async( - transport: str = "grpc_asyncio", request_type=plugin_service.GetPluginRequest -): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_plugin), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - plugin_service.Plugin( - name="name_value", - display_name="display_name_value", - description="description_value", - state=plugin_service.Plugin.State.ENABLED, - ) - ) - response = await client.get_plugin(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = plugin_service.GetPluginRequest() - assert args[0] == request + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_plugin._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the response is the type that we expect. - assert isinstance(response, plugin_service.Plugin) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.state == plugin_service.Plugin.State.ENABLED + # verify required fields with default values are now present + jsonified_request["name"] = "name_value" -@pytest.mark.asyncio -async def test_get_plugin_async_from_dict(): - await test_get_plugin_async(request_type=dict) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_plugin._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -def test_get_plugin_field_headers(): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request = request_type(**request_init) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = plugin_service.GetPluginRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_plugin), "__call__") as call: - call.return_value = plugin_service.Plugin() - client.get_plugin(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_plugin_field_headers_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Designate an appropriate value for the returned response. + return_value = plugin_service.Plugin() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = plugin_service.GetPluginRequest() + response_value = Response() + response_value.status_code = 200 - request.name = "name_value" + # Convert return value to protobuf type + return_value = plugin_service.Plugin.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_plugin), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - plugin_service.Plugin() - ) - await client.get_plugin(request) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.get_plugin(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_get_plugin_flattened(): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_plugin_rest_unset_required_fields(): + transport = transports.ApiHubPluginRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_plugin), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = plugin_service.Plugin() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_plugin( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + unset_fields = transport.get_plugin._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_get_plugin_flattened_error(): - client = ApiHubPluginClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_plugin_rest_interceptors(null_interceptor): + transport = transports.ApiHubPluginRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ApiHubPluginRestInterceptor(), ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): + client = ApiHubPluginClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubPluginRestInterceptor, "post_get_plugin" + ) as post, mock.patch.object( + transports.ApiHubPluginRestInterceptor, "pre_get_plugin" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = plugin_service.GetPluginRequest.pb( + plugin_service.GetPluginRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = plugin_service.Plugin.to_json( + plugin_service.Plugin() + ) + + request = plugin_service.GetPluginRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = plugin_service.Plugin() + client.get_plugin( - plugin_service.GetPluginRequest(), - name="name_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) + 
pre.assert_called_once() + post.assert_called_once() + -@pytest.mark.asyncio -async def test_get_plugin_flattened_async(): - client = ApiHubPluginAsyncClient( +def test_get_plugin_rest_bad_request( + transport: str = "rest", request_type=plugin_service.GetPluginRequest +): + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_plugin), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = plugin_service.Plugin() + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/plugins/sample3"} + request = request_type(**request_init) - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - plugin_service.Plugin() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_plugin( + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_plugin(request) + + +def test_get_plugin_rest_flattened(): + client = ApiHubPluginClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = plugin_service.Plugin() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/plugins/sample3"} + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = plugin_service.Plugin.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_plugin(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/plugins/*}" % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_get_plugin_flattened_error_async(): - client = ApiHubPluginAsyncClient( +def test_get_plugin_rest_flattened_error(transport: str = "rest"): + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_plugin( + client.get_plugin( plugin_service.GetPluginRequest(), name="name_value", ) +def test_get_plugin_rest_error(): + client = ApiHubPluginClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -1480,32 +1248,36 @@ async def test_get_plugin_flattened_error_async(): dict, ], ) -def test_enable_plugin(request_type, transport: str = "grpc"): +def test_enable_plugin_rest(request_type): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/plugins/sample3"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.enable_plugin), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = plugin_service.Plugin( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = plugin_service.Plugin( name="name_value", display_name="display_name_value", description="description_value", state=plugin_service.Plugin.State.ENABLED, ) - response = client.enable_plugin(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = plugin_service.EnablePluginRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = plugin_service.Plugin.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.enable_plugin(request) # Establish that the response is the type that we expect. assert isinstance(response, plugin_service.Plugin) @@ -1515,60 +1287,13 @@ def test_enable_plugin(request_type, transport: str = "grpc"): assert response.state == plugin_service.Plugin.State.ENABLED -def test_enable_plugin_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.enable_plugin), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.enable_plugin() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == plugin_service.EnablePluginRequest() - - -def test_enable_plugin_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = plugin_service.EnablePluginRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.enable_plugin), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.enable_plugin(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == plugin_service.EnablePluginRequest( - name="name_value", - ) - - -def test_enable_plugin_use_cached_wrapped_rpc(): +def test_enable_plugin_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1584,6 +1309,7 @@ def test_enable_plugin_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.enable_plugin] = mock_rpc + request = {} client.enable_plugin(request) @@ -1597,262 +1323,234 @@ def test_enable_plugin_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_enable_plugin_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.enable_plugin), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - plugin_service.Plugin( - name="name_value", - display_name="display_name_value", - description="description_value", - state=plugin_service.Plugin.State.ENABLED, - ) - ) - response = await client.enable_plugin() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == plugin_service.EnablePluginRequest() - - -@pytest.mark.asyncio -async def test_enable_plugin_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_enable_plugin_rest_required_fields( + request_type=plugin_service.EnablePluginRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ApiHubPluginRestTransport - # Ensure method has been cached - assert ( - client._client._transport.enable_plugin - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - 
client._client._transport.enable_plugin - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.enable_plugin(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).enable_plugin._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.enable_plugin(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).enable_plugin._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_enable_plugin_async( - transport: str = "grpc_asyncio", request_type=plugin_service.EnablePluginRequest -): - client = ApiHubPluginAsyncClient( + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.enable_plugin), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - plugin_service.Plugin( - name="name_value", - display_name="display_name_value", - description="description_value", - state=plugin_service.Plugin.State.ENABLED, - ) - ) - response = await client.enable_plugin(request) + # Designate an appropriate value for the returned response. + return_value = plugin_service.Plugin() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = plugin_service.EnablePluginRequest() - assert args[0] == request + response_value = Response() + response_value.status_code = 200 - # Establish that the response is the type that we expect. 
- assert isinstance(response, plugin_service.Plugin) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.state == plugin_service.Plugin.State.ENABLED + # Convert return value to protobuf type + return_value = plugin_service.Plugin.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -@pytest.mark.asyncio -async def test_enable_plugin_async_from_dict(): - await test_enable_plugin_async(request_type=dict) + response = client.enable_plugin(request) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_enable_plugin_field_headers(): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = plugin_service.EnablePluginRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.enable_plugin), "__call__") as call: - call.return_value = plugin_service.Plugin() - client.enable_plugin(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_enable_plugin_rest_unset_required_fields(): + transport = transports.ApiHubPluginRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.enable_plugin._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.asyncio -async def test_enable_plugin_field_headers_async(): - client = ApiHubPluginAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_enable_plugin_rest_interceptors(null_interceptor): + transport = transports.ApiHubPluginRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ApiHubPluginRestInterceptor(), ) + client = ApiHubPluginClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ApiHubPluginRestInterceptor, "post_enable_plugin" + ) as post, mock.patch.object( + transports.ApiHubPluginRestInterceptor, "pre_enable_plugin" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = plugin_service.EnablePluginRequest.pb( + plugin_service.EnablePluginRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = plugin_service.EnablePluginRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.enable_plugin), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = plugin_service.Plugin.to_json( plugin_service.Plugin() ) - await client.enable_plugin(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - -def test_enable_plugin_flattened(): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = plugin_service.EnablePluginRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = plugin_service.Plugin() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.enable_plugin), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = plugin_service.Plugin() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.enable_plugin( - name="name_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_enable_plugin_flattened_error(): +def test_enable_plugin_rest_bad_request( + transport: str = "rest", request_type=plugin_service.EnablePluginRequest +): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.enable_plugin( - plugin_service.EnablePluginRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/plugins/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.enable_plugin(request) -@pytest.mark.asyncio -async def test_enable_plugin_flattened_async(): - client = ApiHubPluginAsyncClient( +def test_enable_plugin_rest_flattened(): + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.enable_plugin), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = plugin_service.Plugin() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = plugin_service.Plugin() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - plugin_service.Plugin() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.enable_plugin( + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/plugins/sample3"} + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = plugin_service.Plugin.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.enable_plugin(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/plugins/*}:enable" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_enable_plugin_flattened_error_async(): - client = ApiHubPluginAsyncClient( +def test_enable_plugin_rest_flattened_error(transport: str = "rest"): + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.enable_plugin( + client.enable_plugin( plugin_service.EnablePluginRequest(), name="name_value", ) +def test_enable_plugin_rest_error(): + client = ApiHubPluginClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -1860,32 +1558,36 @@ async def test_enable_plugin_flattened_error_async(): dict, ], ) -def test_disable_plugin(request_type, transport: str = "grpc"): +def test_disable_plugin_rest(request_type): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/plugins/sample3"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.disable_plugin), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = plugin_service.Plugin( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = plugin_service.Plugin( name="name_value", display_name="display_name_value", description="description_value", state=plugin_service.Plugin.State.ENABLED, ) - response = client.disable_plugin(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = plugin_service.DisablePluginRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = plugin_service.Plugin.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.disable_plugin(request) # Establish that the response is the type that we expect. assert isinstance(response, plugin_service.Plugin) @@ -1895,60 +1597,13 @@ def test_disable_plugin(request_type, transport: str = "grpc"): assert response.state == plugin_service.Plugin.State.ENABLED -def test_disable_plugin_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.disable_plugin), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.disable_plugin() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == plugin_service.DisablePluginRequest() - - -def test_disable_plugin_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = plugin_service.DisablePluginRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.disable_plugin), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.disable_plugin(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == plugin_service.DisablePluginRequest( - name="name_value", - ) - - -def test_disable_plugin_use_cached_wrapped_rpc(): +def test_disable_plugin_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1964,6 +1619,7 @@ def test_disable_plugin_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.disable_plugin] = mock_rpc + request = {} client.disable_plugin(request) @@ -1977,380 +1633,44 @@ def test_disable_plugin_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_disable_plugin_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.disable_plugin), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - plugin_service.Plugin( - name="name_value", - display_name="display_name_value", - description="description_value", - state=plugin_service.Plugin.State.ENABLED, - ) - ) - response = await client.disable_plugin() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == plugin_service.DisablePluginRequest() - - -@pytest.mark.asyncio -async def test_disable_plugin_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_disable_plugin_rest_required_fields( + request_type=plugin_service.DisablePluginRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ApiHubPluginRestTransport - # Ensure method has been cached - assert ( - client._client._transport.disable_plugin - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - 
client._client._transport.disable_plugin - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.disable_plugin(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).disable_plugin._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.disable_plugin(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).disable_plugin._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_disable_plugin_async( - transport: str = "grpc_asyncio", request_type=plugin_service.DisablePluginRequest -): - client = ApiHubPluginAsyncClient( + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.disable_plugin), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - plugin_service.Plugin( - name="name_value", - display_name="display_name_value", - description="description_value", - state=plugin_service.Plugin.State.ENABLED, - ) - ) - response = await client.disable_plugin(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = plugin_service.DisablePluginRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, plugin_service.Plugin) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.state == plugin_service.Plugin.State.ENABLED - - -@pytest.mark.asyncio -async def test_disable_plugin_async_from_dict(): - await test_disable_plugin_async(request_type=dict) - - -def test_disable_plugin_field_headers(): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = plugin_service.DisablePluginRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.disable_plugin), "__call__") as call: - call.return_value = plugin_service.Plugin() - client.disable_plugin(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_disable_plugin_field_headers_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = plugin_service.DisablePluginRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.disable_plugin), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - plugin_service.Plugin() - ) - await client.disable_plugin(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -def test_disable_plugin_flattened(): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.disable_plugin), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = plugin_service.Plugin() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.disable_plugin( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -def test_disable_plugin_flattened_error(): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.disable_plugin( - plugin_service.DisablePluginRequest(), - name="name_value", - ) - - -@pytest.mark.asyncio -async def test_disable_plugin_flattened_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.disable_plugin), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = plugin_service.Plugin() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - plugin_service.Plugin() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.disable_plugin( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_disable_plugin_flattened_error_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.disable_plugin( - plugin_service.DisablePluginRequest(), - name="name_value", - ) - - -@pytest.mark.parametrize( - "request_type", - [ - plugin_service.GetPluginRequest, - dict, - ], -) -def test_get_plugin_rest(request_type): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/plugins/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = plugin_service.Plugin( - name="name_value", - display_name="display_name_value", - description="description_value", - state=plugin_service.Plugin.State.ENABLED, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = plugin_service.Plugin.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_plugin(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, plugin_service.Plugin) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.state == plugin_service.Plugin.State.ENABLED - - -def test_get_plugin_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_plugin in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.get_plugin] = mock_rpc - - request = {} - client.get_plugin(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_plugin(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_plugin_rest_required_fields(request_type=plugin_service.GetPluginRequest): - transport_class = transports.ApiHubPluginRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_plugin._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_plugin._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) + request = request_type(**request_init) # Designate an appropriate value for the returned response. 
return_value = plugin_service.Plugin() @@ -2365,9 +1685,10 @@ def test_get_plugin_rest_required_fields(request_type=plugin_service.GetPluginRe pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -2380,24 +1701,24 @@ def test_get_plugin_rest_required_fields(request_type=plugin_service.GetPluginRe response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_plugin(request) + response = client.disable_plugin(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_plugin_rest_unset_required_fields(): +def test_disable_plugin_rest_unset_required_fields(): transport = transports.ApiHubPluginRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_plugin._get_unset_required_fields({}) + unset_fields = transport.disable_plugin._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_plugin_rest_interceptors(null_interceptor): +def test_disable_plugin_rest_interceptors(null_interceptor): transport = transports.ApiHubPluginRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -2410,14 +1731,14 @@ def test_get_plugin_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ApiHubPluginRestInterceptor, "post_get_plugin" + transports.ApiHubPluginRestInterceptor, "post_disable_plugin" ) as post, mock.patch.object( - transports.ApiHubPluginRestInterceptor, "pre_get_plugin" + transports.ApiHubPluginRestInterceptor, "pre_disable_plugin" ) as pre: 
pre.assert_not_called() post.assert_not_called() - pb_message = plugin_service.GetPluginRequest.pb( - plugin_service.GetPluginRequest() + pb_message = plugin_service.DisablePluginRequest.pb( + plugin_service.DisablePluginRequest() ) transcode.return_value = { "method": "post", @@ -2433,7 +1754,7 @@ def test_get_plugin_rest_interceptors(null_interceptor): plugin_service.Plugin() ) - request = plugin_service.GetPluginRequest() + request = plugin_service.DisablePluginRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -2441,7 +1762,7 @@ def test_get_plugin_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = plugin_service.Plugin() - client.get_plugin( + client.disable_plugin( request, metadata=[ ("key", "val"), @@ -2453,8 +1774,8 @@ def test_get_plugin_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_plugin_rest_bad_request( - transport: str = "rest", request_type=plugin_service.GetPluginRequest +def test_disable_plugin_rest_bad_request( + transport: str = "rest", request_type=plugin_service.DisablePluginRequest ): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2474,10 +1795,10 @@ def test_get_plugin_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_plugin(request) + client.disable_plugin(request) -def test_get_plugin_rest_flattened(): +def test_disable_plugin_rest_flattened(): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -2506,19 +1827,20 @@ def test_get_plugin_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_plugin(**mock_args) + client.disable_plugin(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/plugins/*}" % client.transport._host, + "%s/v1/{name=projects/*/locations/*/plugins/*}:disable" + % client.transport._host, args[1], ) -def test_get_plugin_rest_flattened_error(transport: str = "rest"): +def test_disable_plugin_rest_flattened_error(transport: str = "rest"): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2527,2566 +1849,806 @@ def test_get_plugin_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_plugin( - plugin_service.GetPluginRequest(), + client.disable_plugin( + plugin_service.DisablePluginRequest(), name="name_value", ) -def test_get_plugin_rest_error(): +def test_disable_plugin_rest_error(): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) -@pytest.mark.parametrize( - "request_type", - [ - plugin_service.EnablePluginRequest, - dict, - ], -) -def test_enable_plugin_rest(request_type): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/plugins/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = plugin_service.Plugin( - name="name_value", - display_name="display_name_value", - description="description_value", - state=plugin_service.Plugin.State.ENABLED, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = plugin_service.Plugin.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.enable_plugin(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, plugin_service.Plugin) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.state == plugin_service.Plugin.State.ENABLED - - -def test_enable_plugin_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.enable_plugin in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.enable_plugin] = mock_rpc - - request = {} - client.enable_plugin(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.enable_plugin(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_enable_plugin_rest_required_fields( - request_type=plugin_service.EnablePluginRequest, -): - transport_class = transports.ApiHubPluginRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).enable_plugin._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).enable_plugin._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = plugin_service.Plugin() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = plugin_service.Plugin.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.enable_plugin(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_enable_plugin_rest_unset_required_fields(): - transport = transports.ApiHubPluginRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.enable_plugin._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_enable_plugin_rest_interceptors(null_interceptor): +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
transport = transports.ApiHubPluginRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ApiHubPluginRestInterceptor(), - ) - client = ApiHubPluginClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubPluginRestInterceptor, "post_enable_plugin" - ) as post, mock.patch.object( - transports.ApiHubPluginRestInterceptor, "pre_enable_plugin" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = plugin_service.EnablePluginRequest.pb( - plugin_service.EnablePluginRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = plugin_service.Plugin.to_json( - plugin_service.Plugin() - ) - - request = plugin_service.EnablePluginRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = plugin_service.Plugin() - - client.enable_plugin( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_enable_plugin_rest_bad_request( - transport: str = "rest", request_type=plugin_service.EnablePluginRequest -): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/plugins/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.enable_plugin(request) - - -def test_enable_plugin_rest_flattened(): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = plugin_service.Plugin() - - # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/plugins/sample3"} - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = plugin_service.Plugin.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.enable_plugin(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/plugins/*}:enable" - % client.transport._host, - args[1], - ) - - -def test_enable_plugin_rest_flattened_error(transport: str = "rest"): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
with pytest.raises(ValueError): - client.enable_plugin( - plugin_service.EnablePluginRequest(), - name="name_value", - ) - - -def test_enable_plugin_rest_error(): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - plugin_service.DisablePluginRequest, - dict, - ], -) -def test_disable_plugin_rest(request_type): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/plugins/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = plugin_service.Plugin( - name="name_value", - display_name="display_name_value", - description="description_value", - state=plugin_service.Plugin.State.ENABLED, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = plugin_service.Plugin.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.disable_plugin(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, plugin_service.Plugin) - assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.description == "description_value" - assert response.state == plugin_service.Plugin.State.ENABLED - - -def test_disable_plugin_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.disable_plugin in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.disable_plugin] = mock_rpc - - request = {} - client.disable_plugin(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.disable_plugin(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_disable_plugin_rest_required_fields( - request_type=plugin_service.DisablePluginRequest, -): - transport_class = transports.ApiHubPluginRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).disable_plugin._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).disable_plugin._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = plugin_service.Plugin() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = plugin_service.Plugin.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.disable_plugin(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_disable_plugin_rest_unset_required_fields(): - transport = transports.ApiHubPluginRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.disable_plugin._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_disable_plugin_rest_interceptors(null_interceptor): - transport = transports.ApiHubPluginRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ApiHubPluginRestInterceptor(), - ) - client = ApiHubPluginClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ApiHubPluginRestInterceptor, "post_disable_plugin" - ) as post, mock.patch.object( - transports.ApiHubPluginRestInterceptor, "pre_disable_plugin" - ) as pre: - pre.assert_not_called() - 
post.assert_not_called() - pb_message = plugin_service.DisablePluginRequest.pb( - plugin_service.DisablePluginRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = plugin_service.Plugin.to_json( - plugin_service.Plugin() - ) - - request = plugin_service.DisablePluginRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = plugin_service.Plugin() - - client.disable_plugin( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_disable_plugin_rest_bad_request( - transport: str = "rest", request_type=plugin_service.DisablePluginRequest -): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/plugins/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.disable_plugin(request) - - -def test_disable_plugin_rest_flattened(): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = plugin_service.Plugin() - - # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/plugins/sample3"} - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = plugin_service.Plugin.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.disable_plugin(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/plugins/*}:disable" - % client.transport._host, - args[1], - ) - - -def test_disable_plugin_rest_flattened_error(transport: str = "rest"): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.disable_plugin( - plugin_service.DisablePluginRequest(), - name="name_value", - ) - - -def test_disable_plugin_rest_error(): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. 
- transport = transports.ApiHubPluginGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.ApiHubPluginGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ApiHubPluginClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.ApiHubPluginGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ApiHubPluginClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ApiHubPluginClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.ApiHubPluginGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ApiHubPluginClient( - client_options={"scopes": ["1", "2"]}, transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.ApiHubPluginGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = ApiHubPluginClient(transport=transport) - assert client.transport is transport - - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.ApiHubPluginGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.ApiHubPluginGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ApiHubPluginGrpcTransport, - transports.ApiHubPluginGrpcAsyncIOTransport, - transports.ApiHubPluginRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "rest", - ], -) -def test_transport_kind(transport_name): - transport = ApiHubPluginClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.ApiHubPluginGrpcTransport, - ) - - -def test_api_hub_plugin_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.ApiHubPluginTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", - ) - - -def test_api_hub_plugin_base_transport(): - # Instantiate the base transport. 
- with mock.patch( - "google.cloud.apihub_v1.services.api_hub_plugin.transports.ApiHubPluginTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.ApiHubPluginTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - "get_plugin", - "enable_plugin", - "disable_plugin", - "get_location", - "list_locations", - "get_operation", - "cancel_operation", - "delete_operation", - "list_operations", - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - "kind", - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_api_hub_plugin_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.apihub_v1.services.api_hub_plugin.transports.ApiHubPluginTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ApiHubPluginTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -def test_api_hub_plugin_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.cloud.apihub_v1.services.api_hub_plugin.transports.ApiHubPluginTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ApiHubPluginTransport() - adc.assert_called_once() - - -def test_api_hub_plugin_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - ApiHubPluginClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ApiHubPluginGrpcTransport, - transports.ApiHubPluginGrpcAsyncIOTransport, - ], -) -def test_api_hub_plugin_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ApiHubPluginGrpcTransport, - transports.ApiHubPluginGrpcAsyncIOTransport, - transports.ApiHubPluginRestTransport, - ], -) -def test_api_hub_plugin_transport_auth_gdch_credentials(transport_class): - host = "https://language.com" - api_audience_tests = [None, "https://language2.com"] - api_audience_expect = [host, "https://language2.com"] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, "default", autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock( - return_value=gdch_mock - ) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with(e) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.ApiHubPluginGrpcTransport, grpc_helpers), - (transports.ApiHubPluginGrpcAsyncIOTransport, grpc_helpers_async), - ], -) -def test_api_hub_plugin_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - - create_channel.assert_called_with( - "apihub.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=["1", "2"], - default_host="apihub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize( - "transport_class", - [transports.ApiHubPluginGrpcTransport, transports.ApiHubPluginGrpcAsyncIOTransport], -) -def test_api_hub_plugin_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds, - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback, - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, private_key=expected_key - ) - - -def test_api_hub_plugin_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch( - "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" - ) as mock_configure_mtls_channel: - transports.ApiHubPluginRestTransport( - credentials=cred, client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_api_hub_plugin_host_no_port(transport_name): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="apihub.googleapis.com" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "apihub.googleapis.com:443" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://apihub.googleapis.com" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_api_hub_plugin_host_with_port(transport_name): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="apihub.googleapis.com:8000" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "apihub.googleapis.com:8000" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://apihub.googleapis.com:8000" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "rest", 
- ], -) -def test_api_hub_plugin_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = ApiHubPluginClient( - credentials=creds1, - transport=transport_name, - ) - client2 = ApiHubPluginClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.get_plugin._session - session2 = client2.transport.get_plugin._session - assert session1 != session2 - session1 = client1.transport.enable_plugin._session - session2 = client2.transport.enable_plugin._session - assert session1 != session2 - session1 = client1.transport.disable_plugin._session - session2 = client2.transport.disable_plugin._session - assert session1 != session2 - - -def test_api_hub_plugin_grpc_transport_channel(): - channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.ApiHubPluginGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_api_hub_plugin_grpc_asyncio_transport_channel(): - channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.ApiHubPluginGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize( - "transport_class", - [transports.ApiHubPluginGrpcTransport, transports.ApiHubPluginGrpcAsyncIOTransport], -) -def test_api_hub_plugin_transport_channel_mtls_with_client_cert_source(transport_class): - with mock.patch( - "grpc.ssl_channel_credentials", autospec=True - ) as grpc_ssl_channel_cred: - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize( - "transport_class", - [transports.ApiHubPluginGrpcTransport, transports.ApiHubPluginGrpcAsyncIOTransport], -) -def test_api_hub_plugin_transport_channel_mtls_with_adc(transport_class): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_attribute_path(): - project = "squid" - location = "clam" - attribute = "whelk" - expected = "projects/{project}/locations/{location}/attributes/{attribute}".format( - project=project, - location=location, - attribute=attribute, - ) - actual = ApiHubPluginClient.attribute_path(project, location, attribute) - assert expected == actual - - -def test_parse_attribute_path(): - expected = { - "project": "octopus", - "location": "oyster", - "attribute": "nudibranch", - } - path = ApiHubPluginClient.attribute_path(**expected) - - # Check that the path construction is reversible. 
- actual = ApiHubPluginClient.parse_attribute_path(path) - assert expected == actual - - -def test_plugin_path(): - project = "cuttlefish" - location = "mussel" - plugin = "winkle" - expected = "projects/{project}/locations/{location}/plugins/{plugin}".format( - project=project, - location=location, - plugin=plugin, - ) - actual = ApiHubPluginClient.plugin_path(project, location, plugin) - assert expected == actual - - -def test_parse_plugin_path(): - expected = { - "project": "nautilus", - "location": "scallop", - "plugin": "abalone", - } - path = ApiHubPluginClient.plugin_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubPluginClient.parse_plugin_path(path) - assert expected == actual - - -def test_common_billing_account_path(): - billing_account = "squid" - expected = "billingAccounts/{billing_account}".format( - billing_account=billing_account, - ) - actual = ApiHubPluginClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "clam", - } - path = ApiHubPluginClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubPluginClient.parse_common_billing_account_path(path) - assert expected == actual - - -def test_common_folder_path(): - folder = "whelk" - expected = "folders/{folder}".format( - folder=folder, - ) - actual = ApiHubPluginClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "octopus", - } - path = ApiHubPluginClient.common_folder_path(**expected) - - # Check that the path construction is reversible. 
- actual = ApiHubPluginClient.parse_common_folder_path(path) - assert expected == actual - - -def test_common_organization_path(): - organization = "oyster" - expected = "organizations/{organization}".format( - organization=organization, - ) - actual = ApiHubPluginClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nudibranch", - } - path = ApiHubPluginClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubPluginClient.parse_common_organization_path(path) - assert expected == actual - - -def test_common_project_path(): - project = "cuttlefish" - expected = "projects/{project}".format( - project=project, - ) - actual = ApiHubPluginClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "mussel", - } - path = ApiHubPluginClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = ApiHubPluginClient.parse_common_project_path(path) - assert expected == actual - - -def test_common_location_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, - ) - actual = ApiHubPluginClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = ApiHubPluginClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = ApiHubPluginClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object( - transports.ApiHubPluginTransport, "_prep_wrapped_messages" - ) as prep: - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object( - transports.ApiHubPluginTransport, "_prep_wrapped_messages" - ) as prep: - transport_class = ApiHubPluginClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_get_location_rest_bad_request( - transport: str = "rest", request_type=locations_pb2.GetLocationRequest -): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_location(request) - - -@pytest.mark.parametrize( - "request_type", - [ - locations_pb2.GetLocationRequest, - dict, - ], -) -def test_get_location_rest(request_type): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.Location() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_location(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -def test_list_locations_rest_bad_request( - transport: str = "rest", request_type=locations_pb2.ListLocationsRequest -): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({"name": "projects/sample1"}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_locations(request) - - -@pytest.mark.parametrize( - "request_type", - [ - locations_pb2.ListLocationsRequest, - dict, - ], -) -def test_list_locations_rest(request_type): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.ListLocationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_locations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - - -def test_cancel_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.CancelOperationRequest -): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.CancelOperationRequest, - dict, - ], -) -def test_cancel_operation_rest(request_type): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest -): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.DeleteOperationRequest, - dict, - ], -) -def test_delete_operation_rest(request_type): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_get_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.GetOperationRequest -): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.GetOperationRequest, - dict, - ], -) -def test_get_operation_rest(request_type): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_list_operations_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.ListOperationsRequest -): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_operations(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.ListOperationsRequest, - dict, - ], -) -def test_list_operations_rest(request_type): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_operations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - + ) -def test_delete_operation(transport: str = "grpc"): - client = ApiHubPluginClient( + # It is an error to provide a credentials file and a transport instance. + transport = transports.ApiHubPluginRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) + with pytest.raises(ValueError): + client = ApiHubPluginClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = operations_pb2.DeleteOperationRequest() + # It is an error to provide an api_key and a transport instance. + transport = transports.ApiHubPluginRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ApiHubPluginClient( + client_options=options, + transport=transport, + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ApiHubPluginClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) - # Establish that the response is the type that we expect. - assert response is None + # It is an error to provide scopes and a transport instance. + transport = transports.ApiHubPluginRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ApiHubPluginClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = ApiHubPluginAsyncClient( +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.ApiHubPluginRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) + client = ApiHubPluginClient(transport=transport) + assert client.transport is transport - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the response is the type that we expect. - assert response is None +@pytest.mark.parametrize( + "transport_class", + [ + transports.ApiHubPluginRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() -def test_delete_operation_field_headers(): - client = ApiHubPluginClient( +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_transport_kind(transport_name): + transport = ApiHubPluginClient.get_transport_class(transport_name)( credentials=ga_credentials.AnonymousCredentials(), ) + assert transport.kind == transport_name - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_api_hub_plugin_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ApiHubPluginTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] +def test_api_hub_plugin_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.apihub_v1.services.api_hub_plugin.transports.ApiHubPluginTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ApiHubPluginTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "get_plugin", + "enable_plugin", + "disable_plugin", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = operations_pb2.DeleteOperationRequest() - request.name = "locations" + with pytest.raises(NotImplementedError): + transport.close() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + +def test_api_hub_plugin_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.apihub_v1.services.api_hub_plugin.transports.ApiHubPluginTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ApiHubPluginTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) -def test_delete_operation_from_dict(): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } +def test_api_hub_plugin_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.apihub_v1.services.api_hub_plugin.transports.ApiHubPluginTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ApiHubPluginTransport() + adc.assert_called_once() + + +def test_api_hub_plugin_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ApiHubPluginClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation( - request={ - "name": "locations", - } +def test_api_hub_plugin_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.ApiHubPluginRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback ) - call.assert_called() + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) -def test_cancel_operation(transport: str = "grpc"): +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_api_hub_plugin_host_no_port(transport_name): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "apihub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com" ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = ApiHubPluginAsyncClient( +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_api_hub_plugin_host_with_port(transport_name): + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "apihub.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com:8000" ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the response is the type that we expect. 
- assert response is None +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_api_hub_plugin_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ApiHubPluginClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ApiHubPluginClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_plugin._session + session2 = client2.transport.get_plugin._session + assert session1 != session2 + session1 = client1.transport.enable_plugin._session + session2 = client2.transport.enable_plugin._session + assert session1 != session2 + session1 = client1.transport.disable_plugin._session + session2 = client2.transport.disable_plugin._session + assert session1 != session2 -def test_cancel_operation_field_headers(): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_attribute_path(): + project = "squid" + location = "clam" + attribute = "whelk" + expected = "projects/{project}/locations/{location}/attributes/{attribute}".format( + project=project, + location=location, + attribute=attribute, ) + actual = ApiHubPluginClient.attribute_path(project, location, attribute) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_attribute_path(): + expected = { + "project": "octopus", + "location": "oyster", + "attribute": "nudibranch", + } + path = ApiHubPluginClient.attribute_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = ApiHubPluginClient.parse_attribute_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_plugin_path(): + project = "cuttlefish" + location = "mussel" + plugin = "winkle" + expected = "projects/{project}/locations/{location}/plugins/{plugin}".format( + project=project, + location=location, + plugin=plugin, ) + actual = ApiHubPluginClient.plugin_path(project, location, plugin) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_plugin_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "plugin": "abalone", + } + path = ApiHubPluginClient.plugin_path(**expected) - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = ApiHubPluginClient.parse_plugin_path(path) + assert expected == actual -def test_cancel_operation_from_dict(): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() + actual = ApiHubPluginClient.common_billing_account_path(billing_account) + assert expected == actual -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = ApiHubPluginClient.common_billing_account_path(**expected) + # Check that the path construction is reversible. 
+ actual = ApiHubPluginClient.parse_common_billing_account_path(path) + assert expected == actual -def test_get_operation(transport: str = "grpc"): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, ) + actual = ApiHubPluginClient.common_folder_path(folder) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = ApiHubPluginClient.common_folder_path(**expected) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) + # Check that the path construction is reversible. 
+ actual = ApiHubPluginClient.parse_common_folder_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, ) + actual = ApiHubPluginClient.common_organization_path(organization) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = ApiHubPluginClient.common_organization_path(**expected) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) + # Check that the path construction is reversible. 
+ actual = ApiHubPluginClient.parse_common_organization_path(path) + assert expected == actual -def test_get_operation_field_headers(): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, ) + actual = ApiHubPluginClient.common_project_path(project) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = ApiHubPluginClient.common_project_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = ApiHubPluginClient.parse_common_project_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, ) + actual = ApiHubPluginClient.common_location_path(project, location) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. 
Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = ApiHubPluginClient.common_location_path(**expected) + # Check that the path construction is reversible. + actual = ApiHubPluginClient.parse_common_location_path(path) + assert expected == actual -def test_get_operation_from_dict(): - client = ApiHubPluginClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() + with mock.patch.object( + transports.ApiHubPluginTransport, "_prep_wrapped_messages" + ) as prep: + client = ApiHubPluginClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) - response = await client.get_operation( - request={ - "name": "locations", - } + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ApiHubPluginTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ApiHubPluginClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) - call.assert_called() + prep.assert_called_once_with(client_info) -def test_list_operations(transport: str = "grpc"): +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.ListOperationsResponse) - - -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) -def test_list_operations_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.get_location(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) -def test_list_operations_from_dict(): +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) -def test_list_locations(transport: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, locations_pb2.ListLocationsResponse) -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc_asyncio"): - client = ApiHubPluginAsyncClient( +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) -def test_list_locations_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() - - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.cancel_operation(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Establish that the response is the type that we expect. 
+ assert response is None -def test_list_locations_from_dict(): +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) -def test_get_location(transport: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.delete_operation(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, locations_pb2.Location) + assert response is None -@pytest.mark.asyncio -async def test_get_location_async(transport: str = "grpc_asyncio"): - client = ApiHubPluginAsyncClient( +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) -def test_get_location_field_headers(): - client = ApiHubPluginClient(credentials=ga_credentials.AnonymousCredentials()) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. 
Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = ApiHubPluginClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = locations_pb2.Location() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations/abc", - ) in kw["metadata"] + response = client.get_operation(request) + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_location_field_headers_async(): - client = ApiHubPluginAsyncClient(credentials=ga_credentials.AnonymousCredentials()) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. 
Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = ApiHubPluginClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations/abc", - ) in kw["metadata"] + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) -def test_get_location_from_dict(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): client = ApiHubPluginClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = locations_pb2.Location() - - response = client.get_location( - request={ - "name": "locations/abc", - } - ) - call.assert_called() + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) -@pytest.mark.asyncio -async def test_get_location_from_dict_async(): - client = ApiHubPluginAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location( - request={ - "name": "locations", - } - ) - call.assert_called() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) def test_transport_close(): transports = { "rest": "_session", - "grpc": "_grpc_channel", } for transport, close_name in transports.items(): @@ -5104,7 +2666,6 @@ def test_transport_close(): def test_client_ctx(): transports = [ "rest", - "grpc", ] for transport in transports: client = ApiHubPluginClient( @@ -5121,8 +2682,7 @@ def test_client_ctx(): @pytest.mark.parametrize( "client_class,transport_class", [ - (ApiHubPluginClient, transports.ApiHubPluginGrpcTransport), - (ApiHubPluginAsyncClient, transports.ApiHubPluginGrpcAsyncIOTransport), + (ApiHubPluginClient, transports.ApiHubPluginRestTransport), ], ) def test_api_key_credentials(client_class, transport_class): diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_host_project_registration_service.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_host_project_registration_service.py index e951e616c819..15813aa93505 100644 --- a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_host_project_registration_service.py +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_host_project_registration_service.py @@ -47,7 +47,6 @@ from requests.sessions import Session from google.cloud.apihub_v1.services.host_project_registration_service import ( - HostProjectRegistrationServiceAsyncClient, HostProjectRegistrationServiceClient, pagers, transports, @@ -230,11 +229,6 @@ def test__get_client_cert_source(): "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(HostProjectRegistrationServiceClient), ) -@mock.patch.object( - HostProjectRegistrationServiceAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(HostProjectRegistrationServiceAsyncClient), -) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() @@ -333,11 +327,6 @@ def test__get_universe_domain(): @pytest.mark.parametrize( 
"client_class,transport_class,transport_name", [ - ( - HostProjectRegistrationServiceClient, - transports.HostProjectRegistrationServiceGrpcTransport, - "grpc", - ), ( HostProjectRegistrationServiceClient, transports.HostProjectRegistrationServiceRestTransport, @@ -421,8 +410,6 @@ def test__validate_universe_domain(client_class, transport_class, transport_name @pytest.mark.parametrize( "client_class,transport_name", [ - (HostProjectRegistrationServiceClient, "grpc"), - (HostProjectRegistrationServiceAsyncClient, "grpc_asyncio"), (HostProjectRegistrationServiceClient, "rest"), ], ) @@ -449,8 +436,6 @@ def test_host_project_registration_service_client_from_service_account_info( @pytest.mark.parametrize( "transport_class,transport_name", [ - (transports.HostProjectRegistrationServiceGrpcTransport, "grpc"), - (transports.HostProjectRegistrationServiceGrpcAsyncIOTransport, "grpc_asyncio"), (transports.HostProjectRegistrationServiceRestTransport, "rest"), ], ) @@ -475,8 +460,6 @@ def test_host_project_registration_service_client_service_account_always_use_jwt @pytest.mark.parametrize( "client_class,transport_name", [ - (HostProjectRegistrationServiceClient, "grpc"), - (HostProjectRegistrationServiceAsyncClient, "grpc_asyncio"), (HostProjectRegistrationServiceClient, "rest"), ], ) @@ -510,28 +493,17 @@ def test_host_project_registration_service_client_from_service_account_file( def test_host_project_registration_service_client_get_transport_class(): transport = HostProjectRegistrationServiceClient.get_transport_class() available_transports = [ - transports.HostProjectRegistrationServiceGrpcTransport, transports.HostProjectRegistrationServiceRestTransport, ] assert transport in available_transports - transport = HostProjectRegistrationServiceClient.get_transport_class("grpc") - assert transport == transports.HostProjectRegistrationServiceGrpcTransport + transport = HostProjectRegistrationServiceClient.get_transport_class("rest") + assert transport == 
transports.HostProjectRegistrationServiceRestTransport @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - ( - HostProjectRegistrationServiceClient, - transports.HostProjectRegistrationServiceGrpcTransport, - "grpc", - ), - ( - HostProjectRegistrationServiceAsyncClient, - transports.HostProjectRegistrationServiceGrpcAsyncIOTransport, - "grpc_asyncio", - ), ( HostProjectRegistrationServiceClient, transports.HostProjectRegistrationServiceRestTransport, @@ -544,11 +516,6 @@ def test_host_project_registration_service_client_get_transport_class(): "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(HostProjectRegistrationServiceClient), ) -@mock.patch.object( - HostProjectRegistrationServiceAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(HostProjectRegistrationServiceAsyncClient), -) def test_host_project_registration_service_client_client_options( client_class, transport_class, transport_name ): @@ -686,30 +653,6 @@ def test_host_project_registration_service_client_client_options( @pytest.mark.parametrize( "client_class,transport_class,transport_name,use_client_cert_env", [ - ( - HostProjectRegistrationServiceClient, - transports.HostProjectRegistrationServiceGrpcTransport, - "grpc", - "true", - ), - ( - HostProjectRegistrationServiceAsyncClient, - transports.HostProjectRegistrationServiceGrpcAsyncIOTransport, - "grpc_asyncio", - "true", - ), - ( - HostProjectRegistrationServiceClient, - transports.HostProjectRegistrationServiceGrpcTransport, - "grpc", - "false", - ), - ( - HostProjectRegistrationServiceAsyncClient, - transports.HostProjectRegistrationServiceGrpcAsyncIOTransport, - "grpc_asyncio", - "false", - ), ( HostProjectRegistrationServiceClient, transports.HostProjectRegistrationServiceRestTransport, @@ -729,11 +672,6 @@ def test_host_project_registration_service_client_client_options( "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(HostProjectRegistrationServiceClient), ) 
-@mock.patch.object( - HostProjectRegistrationServiceAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(HostProjectRegistrationServiceAsyncClient), -) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_host_project_registration_service_client_mtls_env_auto( client_class, transport_class, transport_name, use_client_cert_env @@ -837,20 +775,12 @@ def test_host_project_registration_service_client_mtls_env_auto( ) -@pytest.mark.parametrize( - "client_class", - [HostProjectRegistrationServiceClient, HostProjectRegistrationServiceAsyncClient], -) +@pytest.mark.parametrize("client_class", [HostProjectRegistrationServiceClient]) @mock.patch.object( HostProjectRegistrationServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(HostProjectRegistrationServiceClient), ) -@mock.patch.object( - HostProjectRegistrationServiceAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(HostProjectRegistrationServiceAsyncClient), -) def test_host_project_registration_service_client_get_mtls_endpoint_and_cert_source( client_class, ): @@ -944,20 +874,12 @@ def test_host_project_registration_service_client_get_mtls_endpoint_and_cert_sou ) -@pytest.mark.parametrize( - "client_class", - [HostProjectRegistrationServiceClient, HostProjectRegistrationServiceAsyncClient], -) +@pytest.mark.parametrize("client_class", [HostProjectRegistrationServiceClient]) @mock.patch.object( HostProjectRegistrationServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(HostProjectRegistrationServiceClient), ) -@mock.patch.object( - HostProjectRegistrationServiceAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(HostProjectRegistrationServiceAsyncClient), -) def test_host_project_registration_service_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback api_override = "foo.com" @@ -1038,16 +960,6 @@ def 
test_host_project_registration_service_client_client_api_endpoint(client_cla @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - ( - HostProjectRegistrationServiceClient, - transports.HostProjectRegistrationServiceGrpcTransport, - "grpc", - ), - ( - HostProjectRegistrationServiceAsyncClient, - transports.HostProjectRegistrationServiceGrpcAsyncIOTransport, - "grpc_asyncio", - ), ( HostProjectRegistrationServiceClient, transports.HostProjectRegistrationServiceRestTransport, @@ -1083,18 +995,6 @@ def test_host_project_registration_service_client_client_options_scopes( @pytest.mark.parametrize( "client_class,transport_class,transport_name,grpc_helpers", [ - ( - HostProjectRegistrationServiceClient, - transports.HostProjectRegistrationServiceGrpcTransport, - "grpc", - grpc_helpers, - ), - ( - HostProjectRegistrationServiceAsyncClient, - transports.HostProjectRegistrationServiceGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), ( HostProjectRegistrationServiceClient, transports.HostProjectRegistrationServiceRestTransport, @@ -1127,96 +1027,6 @@ def test_host_project_registration_service_client_client_options_credentials_fil ) -def test_host_project_registration_service_client_client_options_from_dict(): - with mock.patch( - "google.cloud.apihub_v1.services.host_project_registration_service.transports.HostProjectRegistrationServiceGrpcTransport.__init__" - ) as grpc_transport: - grpc_transport.return_value = None - client = HostProjectRegistrationServiceClient( - client_options={"api_endpoint": "squid.clam.whelk"} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name,grpc_helpers", - [ - ( - 
HostProjectRegistrationServiceClient, - transports.HostProjectRegistrationServiceGrpcTransport, - "grpc", - grpc_helpers, - ), - ( - HostProjectRegistrationServiceAsyncClient, - transports.HostProjectRegistrationServiceGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), - ], -) -def test_host_project_registration_service_client_create_channel_credentials_file( - client_class, transport_class, transport_name, grpc_helpers -): - # Check the case credentials file is provided. - options = client_options.ClientOptions(credentials_file="credentials.json") - - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "apihub.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=None, - default_host="apihub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - @pytest.mark.parametrize( "request_type", [ @@ -1224,34 +1034,114 @@ def test_host_project_registration_service_client_create_channel_credentials_fil dict, ], ) -def test_create_host_project_registration(request_type, transport: str = "grpc"): +def test_create_host_project_registration_rest(request_type): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["host_project_registration"] = { + "name": "name_value", + "gcp_project": "gcp_project_value", + "create_time": {"seconds": 751, "nanos": 543}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_host_project_registration), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = host_project_registration_service.HostProjectRegistration( + # Determine if the message type is proto-plus or protobuf + test_field = host_project_registration_service.CreateHostProjectRegistrationRequest.meta.fields[ + "host_project_registration" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "host_project_registration" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if 
isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["host_project_registration"][field]) + ): + del request_init["host_project_registration"][field][i][subfield] + else: + del request_init["host_project_registration"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = host_project_registration_service.HostProjectRegistration( name="name_value", gcp_project="gcp_project_value", ) - response = client.create_host_project_registration(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = ( - host_project_registration_service.CreateHostProjectRegistrationRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = host_project_registration_service.HostProjectRegistration.pb( + return_value ) - assert args[0] == request + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_host_project_registration(request) # Establish that the response is the type that we expect. assert isinstance( @@ -1261,71 +1151,13 @@ def test_create_host_project_registration(request_type, transport: str = "grpc") assert response.gcp_project == "gcp_project_value" -def test_create_host_project_registration_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_host_project_registration), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_host_project_registration() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == host_project_registration_service.CreateHostProjectRegistrationRequest() - ) - - -def test_create_host_project_registration_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = host_project_registration_service.CreateHostProjectRegistrationRequest( - parent="parent_value", - host_project_registration_id="host_project_registration_id_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_host_project_registration), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_host_project_registration(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[ - 0 - ] == host_project_registration_service.CreateHostProjectRegistrationRequest( - parent="parent_value", - host_project_registration_id="host_project_registration_id_value", - ) - - -def test_create_host_project_registration_use_cached_wrapped_rpc(): +def test_create_host_project_registration_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1346,6 +1178,7 @@ def test_create_host_project_registration_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.create_host_project_registration ] = mock_rpc + request = {} client.create_host_project_registration(request) @@ -1359,231 +1192,268 @@ def test_create_host_project_registration_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 
-@pytest.mark.asyncio -async def test_create_host_project_registration_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", +def test_create_host_project_registration_rest_required_fields( + request_type=host_project_registration_service.CreateHostProjectRegistrationRequest, +): + transport_class = transports.HostProjectRegistrationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["host_project_registration_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_host_project_registration), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - host_project_registration_service.HostProjectRegistration( - name="name_value", - gcp_project="gcp_project_value", - ) - ) - response = await client.create_host_project_registration() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == host_project_registration_service.CreateHostProjectRegistrationRequest() - ) + # verify fields with default values are dropped + assert "hostProjectRegistrationId" not in jsonified_request + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_host_project_registration._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) -@pytest.mark.asyncio -async def test_create_host_project_registration_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # verify required fields with default values are now present + assert "hostProjectRegistrationId" in jsonified_request + assert ( + jsonified_request["hostProjectRegistrationId"] + == request_init["host_project_registration_id"] + ) - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + jsonified_request["parent"] = "parent_value" + jsonified_request[ + "hostProjectRegistrationId" + ] = "host_project_registration_id_value" - # Ensure method has been cached - assert ( - client._client._transport.create_host_project_registration - in client._client._transport._wrapped_methods - ) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + 
).create_host_project_registration._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("host_project_registration_id",)) + jsonified_request.update(unset_fields) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.create_host_project_registration - ] = mock_rpc + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "hostProjectRegistrationId" in jsonified_request + assert ( + jsonified_request["hostProjectRegistrationId"] + == "host_project_registration_id_value" + ) - request = {} - await client.create_host_project_registration(request) + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # Designate an appropriate value for the returned response. + return_value = host_project_registration_service.HostProjectRegistration() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - await client.create_host_project_registration(request) + response_value = Response() + response_value.status_code = 200 - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + # Convert return value to protobuf type + return_value = host_project_registration_service.HostProjectRegistration.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -@pytest.mark.asyncio -async def test_create_host_project_registration_async( - transport: str = "grpc_asyncio", - request_type=host_project_registration_service.CreateHostProjectRegistrationRequest, -): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + response = client.create_host_project_registration(request) + + expected_params = [ + ( + "hostProjectRegistrationId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_host_project_registration_rest_unset_required_fields(): + transport = transports.HostProjectRegistrationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + unset_fields = ( + transport.create_host_project_registration._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set(("hostProjectRegistrationId",)) + & set( + ( + "parent", + "hostProjectRegistrationId", + "hostProjectRegistration", + ) + ) + ) - # Mock the actual call within the gRPC stub, and fake the request. + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_host_project_registration_rest_interceptors(null_interceptor): + transport = transports.HostProjectRegistrationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.HostProjectRegistrationServiceRestInterceptor(), + ) + client = HostProjectRegistrationServiceClient(transport=transport) with mock.patch.object( - type(client.transport.create_host_project_registration), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - host_project_registration_service.HostProjectRegistration( - name="name_value", - gcp_project="gcp_project_value", + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.HostProjectRegistrationServiceRestInterceptor, + "post_create_host_project_registration", + ) as post, mock.patch.object( + transports.HostProjectRegistrationServiceRestInterceptor, + "pre_create_host_project_registration", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = ( + host_project_registration_service.CreateHostProjectRegistrationRequest.pb( + host_project_registration_service.CreateHostProjectRegistrationRequest() + ) + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = 
PreparedRequest() + req.return_value._content = ( + host_project_registration_service.HostProjectRegistration.to_json( + host_project_registration_service.HostProjectRegistration() ) ) - response = await client.create_host_project_registration(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] request = ( host_project_registration_service.CreateHostProjectRegistrationRequest() ) - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance( - response, host_project_registration_service.HostProjectRegistration - ) - assert response.name == "name_value" - assert response.gcp_project == "gcp_project_value" + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = host_project_registration_service.HostProjectRegistration() + client.create_host_project_registration( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) -@pytest.mark.asyncio -async def test_create_host_project_registration_async_from_dict(): - await test_create_host_project_registration_async(request_type=dict) + pre.assert_called_once() + post.assert_called_once() -def test_create_host_project_registration_field_headers(): +def test_create_host_project_registration_rest_bad_request( + transport: str = "rest", + request_type=host_project_registration_service.CreateHostProjectRegistrationRequest, +): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = host_project_registration_service.CreateHostProjectRegistrationRequest() - - request.parent = "parent_value" + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_host_project_registration), "__call__" - ) as call: - call.return_value = host_project_registration_service.HostProjectRegistration() + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value client.create_host_project_registration(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - -@pytest.mark.asyncio -async def test_create_host_project_registration_field_headers_async(): - client = HostProjectRegistrationServiceAsyncClient( +def test_create_host_project_registration_rest_flattened(): + client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = host_project_registration_service.CreateHostProjectRegistrationRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_host_project_registration), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - host_project_registration_service.HostProjectRegistration() - ) - await client.create_host_project_registration(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = host_project_registration_service.HostProjectRegistration() -def test_create_host_project_registration_flattened(): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_host_project_registration), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = host_project_registration_service.HostProjectRegistration() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.create_host_project_registration( + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", host_project_registration=host_project_registration_service.HostProjectRegistration( name="name_value" ), host_project_registration_id="host_project_registration_id_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = host_project_registration_service.HostProjectRegistration.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_host_project_registration(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].host_project_registration - mock_val = host_project_registration_service.HostProjectRegistration( - name="name_value" + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/hostProjectRegistrations" + % client.transport._host, + args[1], ) - assert arg == mock_val - arg = args[0].host_project_registration_id - mock_val = "host_project_registration_id_value" - assert arg == mock_val -def test_create_host_project_registration_flattened_error(): +def test_create_host_project_registration_rest_flattened_error(transport: str = "rest"): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1599,67 +1469,11 @@ def test_create_host_project_registration_flattened_error(): ) -@pytest.mark.asyncio -async def 
test_create_host_project_registration_flattened_async(): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_host_project_registration), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = host_project_registration_service.HostProjectRegistration() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - host_project_registration_service.HostProjectRegistration() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_host_project_registration( - parent="parent_value", - host_project_registration=host_project_registration_service.HostProjectRegistration( - name="name_value" - ), - host_project_registration_id="host_project_registration_id_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].host_project_registration - mock_val = host_project_registration_service.HostProjectRegistration( - name="name_value" - ) - assert arg == mock_val - arg = args[0].host_project_registration_id - mock_val = "host_project_registration_id_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_create_host_project_registration_flattened_error_async(): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_create_host_project_registration_rest_error(): + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.create_host_project_registration( - host_project_registration_service.CreateHostProjectRegistrationRequest(), - parent="parent_value", - host_project_registration=host_project_registration_service.HostProjectRegistration( - name="name_value" - ), - host_project_registration_id="host_project_registration_id_value", - ) - @pytest.mark.parametrize( "request_type", @@ -1668,32 +1482,38 @@ async def test_create_host_project_registration_flattened_error_async(): dict, ], ) -def test_get_host_project_registration(request_type, transport: str = "grpc"): +def test_get_host_project_registration_rest(request_type): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/hostProjectRegistrations/sample3" + } + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_host_project_registration), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = host_project_registration_service.HostProjectRegistration( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = host_project_registration_service.HostProjectRegistration( name="name_value", gcp_project="gcp_project_value", ) - response = client.get_host_project_registration(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = host_project_registration_service.GetHostProjectRegistrationRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = host_project_registration_service.HostProjectRegistration.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_host_project_registration(request) # Establish that the response is the type that we expect. assert isinstance( @@ -1703,69 +1523,13 @@ def test_get_host_project_registration(request_type, transport: str = "grpc"): assert response.gcp_project == "gcp_project_value" -def test_get_host_project_registration_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_host_project_registration), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_host_project_registration() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == host_project_registration_service.GetHostProjectRegistrationRequest() - ) - - -def test_get_host_project_registration_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = host_project_registration_service.GetHostProjectRegistrationRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_host_project_registration), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_host_project_registration(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[ - 0 - ] == host_project_registration_service.GetHostProjectRegistrationRequest( - name="name_value", - ) - - -def test_get_host_project_registration_use_cached_wrapped_rpc(): +def test_get_host_project_registration_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1786,6 +1550,7 @@ def test_get_host_project_registration_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.get_host_project_registration ] = mock_rpc + request = {} client.get_host_project_registration(request) @@ -1799,274 +1564,250 @@ def test_get_host_project_registration_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_host_project_registration_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_host_project_registration), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - host_project_registration_service.HostProjectRegistration( - name="name_value", - gcp_project="gcp_project_value", - ) - ) - response = await client.get_host_project_registration() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == host_project_registration_service.GetHostProjectRegistrationRequest() - ) - - -@pytest.mark.asyncio -async def test_get_host_project_registration_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_get_host_project_registration_rest_required_fields( + request_type=host_project_registration_service.GetHostProjectRegistrationRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.HostProjectRegistrationServiceRestTransport - # Ensure method has been cached - assert ( - client._client._transport.get_host_project_registration - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, 
use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_host_project_registration - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.get_host_project_registration(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_host_project_registration._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.get_host_project_registration(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_host_project_registration._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_get_host_project_registration_async( - transport: str = "grpc_asyncio", - request_type=host_project_registration_service.GetHostProjectRegistrationRequest, -): - client = HostProjectRegistrationServiceAsyncClient( + client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # Designate an appropriate value for the returned response. + return_value = host_project_registration_service.HostProjectRegistration() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_host_project_registration), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - host_project_registration_service.HostProjectRegistration( - name="name_value", - gcp_project="gcp_project_value", - ) - ) - response = await client.get_host_project_registration(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = host_project_registration_service.GetHostProjectRegistrationRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = host_project_registration_service.HostProjectRegistration.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance( - response, host_project_registration_service.HostProjectRegistration - ) - assert response.name == "name_value" - assert response.gcp_project == "gcp_project_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_host_project_registration(request) -@pytest.mark.asyncio -async def test_get_host_project_registration_async_from_dict(): - await test_get_host_project_registration_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_get_host_project_registration_field_headers(): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_host_project_registration_rest_unset_required_fields(): + transport = transports.HostProjectRegistrationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = host_project_registration_service.GetHostProjectRegistrationRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_host_project_registration), "__call__" - ) as call: - call.return_value = host_project_registration_service.HostProjectRegistration() - client.get_host_project_registration(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.get_host_project_registration._get_unset_required_fields( + {} + ) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.asyncio -async def test_get_host_project_registration_field_headers_async(): - client = HostProjectRegistrationServiceAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_host_project_registration_rest_interceptors(null_interceptor): + transport = transports.HostProjectRegistrationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.HostProjectRegistrationServiceRestInterceptor(), ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = host_project_registration_service.GetHostProjectRegistrationRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
+ client = HostProjectRegistrationServiceClient(transport=transport) with mock.patch.object( - type(client.transport.get_host_project_registration), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - host_project_registration_service.HostProjectRegistration() + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.HostProjectRegistrationServiceRestInterceptor, + "post_get_host_project_registration", + ) as post, mock.patch.object( + transports.HostProjectRegistrationServiceRestInterceptor, + "pre_get_host_project_registration", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = ( + host_project_registration_service.GetHostProjectRegistrationRequest.pb( + host_project_registration_service.GetHostProjectRegistrationRequest() + ) ) - await client.get_host_project_registration(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -def test_get_host_project_registration_flattened(): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_host_project_registration), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = host_project_registration_service.HostProjectRegistration() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.get_host_project_registration( - name="name_value", + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + host_project_registration_service.HostProjectRegistration.to_json( + host_project_registration_service.HostProjectRegistration() + ) ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + request = host_project_registration_service.GetHostProjectRegistrationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = host_project_registration_service.HostProjectRegistration() + + client.get_host_project_registration( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() -def test_get_host_project_registration_flattened_error(): +def test_get_host_project_registration_rest_bad_request( + transport: str = "rest", + request_type=host_project_registration_service.GetHostProjectRegistrationRequest, +): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_host_project_registration( - host_project_registration_service.GetHostProjectRegistrationRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/hostProjectRegistrations/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_host_project_registration(request) -@pytest.mark.asyncio -async def test_get_host_project_registration_flattened_async(): - client = HostProjectRegistrationServiceAsyncClient( +def test_get_host_project_registration_rest_flattened(): + client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_host_project_registration), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = host_project_registration_service.HostProjectRegistration() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = host_project_registration_service.HostProjectRegistration() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - host_project_registration_service.HostProjectRegistration() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.get_host_project_registration( + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/hostProjectRegistrations/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = host_project_registration_service.HostProjectRegistration.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_host_project_registration(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/hostProjectRegistrations/*}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_get_host_project_registration_flattened_error_async(): - client = HostProjectRegistrationServiceAsyncClient( +def test_get_host_project_registration_rest_flattened_error(transport: str = "rest"): + client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_host_project_registration( + client.get_host_project_registration( host_project_registration_service.GetHostProjectRegistrationRequest(), name="name_value", ) +def test_get_host_project_registration_rest_error(): + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -2074,110 +1815,52 @@ async def test_get_host_project_registration_flattened_error_async(): dict, ], ) -def test_list_host_project_registrations(request_type, transport: str = "grpc"): +def test_list_host_project_registrations_rest(request_type): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_host_project_registrations), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = ( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = ( host_project_registration_service.ListHostProjectRegistrationsResponse( next_page_token="next_page_token_value", ) ) - response = client.list_host_project_registrations(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = ( - host_project_registration_service.ListHostProjectRegistrationsRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = ( + host_project_registration_service.ListHostProjectRegistrationsResponse.pb( + return_value + ) ) - assert args[0] == request + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_host_project_registrations(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListHostProjectRegistrationsPager) assert response.next_page_token == "next_page_token_value" -def test_list_host_project_registrations_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_host_project_registrations), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_host_project_registrations() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == host_project_registration_service.ListHostProjectRegistrationsRequest() - ) - - -def test_list_host_project_registrations_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = host_project_registration_service.ListHostProjectRegistrationsRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_host_project_registrations), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_host_project_registrations(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[ - 0 - ] == host_project_registration_service.ListHostProjectRegistrationsRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", - ) - - -def test_list_host_project_registrations_use_cached_wrapped_rpc(): +def test_list_host_project_registrations_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -2198,6 +1881,7 @@ def test_list_host_project_registrations_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.list_host_project_registrations ] = mock_rpc + request = {} client.list_host_project_registrations(request) @@ -2211,289 +1895,279 @@ def test_list_host_project_registrations_use_cached_wrapped_rpc(): assert 
mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_host_project_registrations_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", +def test_list_host_project_registrations_rest_required_fields( + request_type=host_project_registration_service.ListHostProjectRegistrationsRequest, +): + transport_class = transports.HostProjectRegistrationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_host_project_registrations), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - host_project_registration_service.ListHostProjectRegistrationsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_host_project_registrations() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == host_project_registration_service.ListHostProjectRegistrationsRequest() - ) + # verify fields with default values are dropped + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_host_project_registrations._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) -@pytest.mark.asyncio -async def test_list_host_project_registrations_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # verify required fields with default values are now present - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + jsonified_request["parent"] = "parent_value" - # Ensure method has been cached - assert ( - client._client._transport.list_host_project_registrations - in client._client._transport._wrapped_methods + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_host_project_registrations._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", ) + ) + jsonified_request.update(unset_fields) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.list_host_project_registrations - ] = mock_rpc + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" - request = {} - await client.list_host_project_registrations(request) + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # Designate an appropriate value for the returned response. + return_value = ( + host_project_registration_service.ListHostProjectRegistrationsResponse() + ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - await client.list_host_project_registrations(request) + response_value = Response() + response_value.status_code = 200 - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + # Convert return value to protobuf type + return_value = host_project_registration_service.ListHostProjectRegistrationsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -@pytest.mark.asyncio -async def test_list_host_project_registrations_async( - transport: str = "grpc_asyncio", - request_type=host_project_registration_service.ListHostProjectRegistrationsRequest, -): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + response = client.list_host_project_registrations(request) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_host_project_registrations), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - host_project_registration_service.ListHostProjectRegistrationsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_host_project_registrations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = ( - host_project_registration_service.ListHostProjectRegistrationsRequest() - ) - assert args[0] == request - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListHostProjectRegistrationsAsyncPager) - assert response.next_page_token == "next_page_token_value" - - -@pytest.mark.asyncio -async def test_list_host_project_registrations_async_from_dict(): - await test_list_host_project_registrations_async(request_type=dict) - - -def test_list_host_project_registrations_field_headers(): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_list_host_project_registrations_rest_unset_required_fields(): + transport = transports.HostProjectRegistrationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = host_project_registration_service.ListHostProjectRegistrationsRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_host_project_registrations), "__call__" - ) as call: - call.return_value = ( - host_project_registration_service.ListHostProjectRegistrationsResponse() + unset_fields = transport.list_host_project_registrations._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) ) - client.list_host_project_registrations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + & set(("parent",)) + ) -@pytest.mark.asyncio -async def test_list_host_project_registrations_field_headers_async(): - client = HostProjectRegistrationServiceAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_host_project_registrations_rest_interceptors(null_interceptor): + transport = transports.HostProjectRegistrationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.HostProjectRegistrationServiceRestInterceptor(), ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = host_project_registration_service.ListHostProjectRegistrationsRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
+ client = HostProjectRegistrationServiceClient(transport=transport) with mock.patch.object( - type(client.transport.list_host_project_registrations), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - host_project_registration_service.ListHostProjectRegistrationsResponse() + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.HostProjectRegistrationServiceRestInterceptor, + "post_list_host_project_registrations", + ) as post, mock.patch.object( + transports.HostProjectRegistrationServiceRestInterceptor, + "pre_list_host_project_registrations", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = ( + host_project_registration_service.ListHostProjectRegistrationsRequest.pb( + host_project_registration_service.ListHostProjectRegistrationsRequest() + ) ) - await client.list_host_project_registrations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } -def test_list_host_project_registrations_flattened(): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = host_project_registration_service.ListHostProjectRegistrationsResponse.to_json( + host_project_registration_service.ListHostProjectRegistrationsResponse() + ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_host_project_registrations), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = ( + request = ( + host_project_registration_service.ListHostProjectRegistrationsRequest() + ) + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ( host_project_registration_service.ListHostProjectRegistrationsResponse() ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. + client.list_host_project_registrations( - parent="parent_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_list_host_project_registrations_flattened_error(): +def test_list_host_project_registrations_rest_bad_request( + transport: str = "rest", + request_type=host_project_registration_service.ListHostProjectRegistrationsRequest, +): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_host_project_registrations( - host_project_registration_service.ListHostProjectRegistrationsRequest(), - parent="parent_value", - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_host_project_registrations(request) -@pytest.mark.asyncio -async def test_list_host_project_registrations_flattened_async(): - client = HostProjectRegistrationServiceAsyncClient( +def test_list_host_project_registrations_rest_flattened(): + client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_host_project_registrations), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = ( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = ( host_project_registration_service.ListHostProjectRegistrationsResponse() ) - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - host_project_registration_service.ListHostProjectRegistrationsResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.list_host_project_registrations( + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = ( + host_project_registration_service.ListHostProjectRegistrationsResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_host_project_registrations(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/hostProjectRegistrations" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_list_host_project_registrations_flattened_error_async(): - client = HostProjectRegistrationServiceAsyncClient( +def test_list_host_project_registrations_rest_flattened_error(transport: str = "rest"): + client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_host_project_registrations( + client.list_host_project_registrations( host_project_registration_service.ListHostProjectRegistrationsRequest(), parent="parent_value", ) -def test_list_host_project_registrations_pager(transport_name: str = "grpc"): +def test_list_host_project_registrations_rest_pager(transport: str = "rest"): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_host_project_registrations), "__call__" - ) as call: - # Set the response to a series of pages. - call.side_effect = ( + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( host_project_registration_service.ListHostProjectRegistrationsResponse( host_project_registrations=[ host_project_registration_service.HostProjectRegistration(), @@ -2518,22 +2192,26 @@ def test_list_host_project_registrations_pager(transport_name: str = "grpc"): host_project_registration_service.HostProjectRegistration(), ], ), - RuntimeError, ) + # Two responses for two calls + response = response + response - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_host_project_registrations( - request={}, retry=retry, timeout=timeout + # Wrap the values into proper Response objs + response = tuple( + host_project_registration_service.ListHostProjectRegistrationsResponse.to_json( + x + ) + for x in response ) + return_values = tuple(Response() 
for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + pager = client.list_host_project_registrations(request=sample_request) results = list(pager) assert len(results) == 6 @@ -2542,3300 +2220,791 @@ def test_list_host_project_registrations_pager(transport_name: str = "grpc"): for i in results ) - -def test_list_host_project_registrations_pages(transport_name: str = "grpc"): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_host_project_registrations), "__call__" - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[ - host_project_registration_service.HostProjectRegistration(), - host_project_registration_service.HostProjectRegistration(), - host_project_registration_service.HostProjectRegistration(), - ], - next_page_token="abc", - ), - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[], - next_page_token="def", - ), - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[ - host_project_registration_service.HostProjectRegistration(), - ], - next_page_token="ghi", - ), - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[ - host_project_registration_service.HostProjectRegistration(), - host_project_registration_service.HostProjectRegistration(), - ], - ), - RuntimeError, + pages = list( + client.list_host_project_registrations(request=sample_request).pages ) - pages = list(client.list_host_project_registrations(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.asyncio -async def test_list_host_project_registrations_async_pager(): - client = HostProjectRegistrationServiceAsyncClient( +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.HostProjectRegistrationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), ) + with pytest.raises(ValueError): + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_host_project_registrations), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[ - host_project_registration_service.HostProjectRegistration(), - host_project_registration_service.HostProjectRegistration(), - host_project_registration_service.HostProjectRegistration(), - ], - next_page_token="abc", - ), - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[], - next_page_token="def", - ), - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[ - host_project_registration_service.HostProjectRegistration(), - ], - next_page_token="ghi", - ), - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[ - host_project_registration_service.HostProjectRegistration(), - host_project_registration_service.HostProjectRegistration(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_host_project_registrations( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all( - isinstance(i, host_project_registration_service.HostProjectRegistration) - for i in responses - ) - - -@pytest.mark.asyncio -async def test_list_host_project_registrations_async_pages(): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_host_project_registrations), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[ - host_project_registration_service.HostProjectRegistration(), - host_project_registration_service.HostProjectRegistration(), - host_project_registration_service.HostProjectRegistration(), - ], - next_page_token="abc", - ), - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[], - next_page_token="def", - ), - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[ - host_project_registration_service.HostProjectRegistration(), - ], - next_page_token="ghi", - ), - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[ - host_project_registration_service.HostProjectRegistration(), - host_project_registration_service.HostProjectRegistration(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_host_project_registrations(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - host_project_registration_service.CreateHostProjectRegistrationRequest, - dict, - ], -) -def test_create_host_project_registration_rest(request_type): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["host_project_registration"] = { - "name": "name_value", - "gcp_project": "gcp_project_value", - "create_time": {"seconds": 751, "nanos": 
543}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = host_project_registration_service.CreateHostProjectRegistrationRequest.meta.fields[ - "host_project_registration" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "host_project_registration" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - 
subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range( - 0, len(request_init["host_project_registration"][field]) - ): - del request_init["host_project_registration"][field][i][subfield] - else: - del request_init["host_project_registration"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = host_project_registration_service.HostProjectRegistration( - name="name_value", - gcp_project="gcp_project_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = host_project_registration_service.HostProjectRegistration.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.create_host_project_registration(request) - - # Establish that the response is the type that we expect. 
- assert isinstance( - response, host_project_registration_service.HostProjectRegistration - ) - assert response.name == "name_value" - assert response.gcp_project == "gcp_project_value" - - -def test_create_host_project_registration_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.create_host_project_registration - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.create_host_project_registration - ] = mock_rpc - - request = {} - client.create_host_project_registration(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_host_project_registration(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_host_project_registration_rest_required_fields( - request_type=host_project_registration_service.CreateHostProjectRegistrationRequest, -): - transport_class = transports.HostProjectRegistrationServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["host_project_registration_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - assert "hostProjectRegistrationId" not in jsonified_request - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_host_project_registration._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "hostProjectRegistrationId" in jsonified_request - assert ( - jsonified_request["hostProjectRegistrationId"] - == request_init["host_project_registration_id"] - ) - - jsonified_request["parent"] = "parent_value" - jsonified_request[ - "hostProjectRegistrationId" - ] = "host_project_registration_id_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_host_project_registration._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("host_project_registration_id",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "hostProjectRegistrationId" in jsonified_request - assert ( - jsonified_request["hostProjectRegistrationId"] - == "host_project_registration_id_value" - ) - - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = host_project_registration_service.HostProjectRegistration() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = host_project_registration_service.HostProjectRegistration.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.create_host_project_registration(request) - - expected_params = [ - ( - "hostProjectRegistrationId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_create_host_project_registration_rest_unset_required_fields(): - transport = transports.HostProjectRegistrationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = ( - transport.create_host_project_registration._get_unset_required_fields({}) - ) - assert set(unset_fields) == ( - set(("hostProjectRegistrationId",)) - & set( - ( - "parent", - "hostProjectRegistrationId", - "hostProjectRegistration", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_host_project_registration_rest_interceptors(null_interceptor): - transport = transports.HostProjectRegistrationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.HostProjectRegistrationServiceRestInterceptor(), - ) - client = HostProjectRegistrationServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - 
transports.HostProjectRegistrationServiceRestInterceptor, - "post_create_host_project_registration", - ) as post, mock.patch.object( - transports.HostProjectRegistrationServiceRestInterceptor, - "pre_create_host_project_registration", - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = ( - host_project_registration_service.CreateHostProjectRegistrationRequest.pb( - host_project_registration_service.CreateHostProjectRegistrationRequest() - ) - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = ( - host_project_registration_service.HostProjectRegistration.to_json( - host_project_registration_service.HostProjectRegistration() - ) - ) - - request = ( - host_project_registration_service.CreateHostProjectRegistrationRequest() - ) - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = host_project_registration_service.HostProjectRegistration() - - client.create_host_project_registration( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_host_project_registration_rest_bad_request( - transport: str = "rest", - request_type=host_project_registration_service.CreateHostProjectRegistrationRequest, -): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_host_project_registration(request) - - -def test_create_host_project_registration_rest_flattened(): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = host_project_registration_service.HostProjectRegistration() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - host_project_registration=host_project_registration_service.HostProjectRegistration( - name="name_value" - ), - host_project_registration_id="host_project_registration_id_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = host_project_registration_service.HostProjectRegistration.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.create_host_project_registration(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/hostProjectRegistrations" - % client.transport._host, - args[1], - ) - - -def test_create_host_project_registration_rest_flattened_error(transport: str = "rest"): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_host_project_registration( - host_project_registration_service.CreateHostProjectRegistrationRequest(), - parent="parent_value", - host_project_registration=host_project_registration_service.HostProjectRegistration( - name="name_value" - ), - host_project_registration_id="host_project_registration_id_value", - ) - - -def test_create_host_project_registration_rest_error(): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - host_project_registration_service.GetHostProjectRegistrationRequest, - dict, - ], -) -def test_get_host_project_registration_rest(request_type): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/hostProjectRegistrations/sample3" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = host_project_registration_service.HostProjectRegistration( - name="name_value", - gcp_project="gcp_project_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = host_project_registration_service.HostProjectRegistration.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_host_project_registration(request) - - # Establish that the response is the type that we expect. - assert isinstance( - response, host_project_registration_service.HostProjectRegistration - ) - assert response.name == "name_value" - assert response.gcp_project == "gcp_project_value" - - -def test_get_host_project_registration_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.get_host_project_registration - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.get_host_project_registration - ] = mock_rpc - - request = {} - client.get_host_project_registration(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_host_project_registration(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_host_project_registration_rest_required_fields( - request_type=host_project_registration_service.GetHostProjectRegistrationRequest, -): - transport_class = transports.HostProjectRegistrationServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_host_project_registration._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_host_project_registration._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = host_project_registration_service.HostProjectRegistration() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = host_project_registration_service.HostProjectRegistration.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_host_project_registration(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_host_project_registration_rest_unset_required_fields(): - transport = transports.HostProjectRegistrationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_host_project_registration._get_unset_required_fields( - {} - ) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_host_project_registration_rest_interceptors(null_interceptor): - transport = transports.HostProjectRegistrationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.HostProjectRegistrationServiceRestInterceptor(), - ) - client = HostProjectRegistrationServiceClient(transport=transport) - with 
mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.HostProjectRegistrationServiceRestInterceptor, - "post_get_host_project_registration", - ) as post, mock.patch.object( - transports.HostProjectRegistrationServiceRestInterceptor, - "pre_get_host_project_registration", - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = ( - host_project_registration_service.GetHostProjectRegistrationRequest.pb( - host_project_registration_service.GetHostProjectRegistrationRequest() - ) - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = ( - host_project_registration_service.HostProjectRegistration.to_json( - host_project_registration_service.HostProjectRegistration() - ) - ) - - request = host_project_registration_service.GetHostProjectRegistrationRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = host_project_registration_service.HostProjectRegistration() - - client.get_host_project_registration( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_host_project_registration_rest_bad_request( - transport: str = "rest", - request_type=host_project_registration_service.GetHostProjectRegistrationRequest, -): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/hostProjectRegistrations/sample3" - } - request = request_type(**request_init) - - # Mock 
the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_host_project_registration(request) - - -def test_get_host_project_registration_rest_flattened(): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = host_project_registration_service.HostProjectRegistration() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/hostProjectRegistrations/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = host_project_registration_service.HostProjectRegistration.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_host_project_registration(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/hostProjectRegistrations/*}" - % client.transport._host, - args[1], - ) - - -def test_get_host_project_registration_rest_flattened_error(transport: str = "rest"): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_host_project_registration( - host_project_registration_service.GetHostProjectRegistrationRequest(), - name="name_value", - ) - - -def test_get_host_project_registration_rest_error(): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - host_project_registration_service.ListHostProjectRegistrationsRequest, - dict, - ], -) -def test_list_host_project_registrations_rest(request_type): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = ( - host_project_registration_service.ListHostProjectRegistrationsResponse( - next_page_token="next_page_token_value", - ) - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = ( - host_project_registration_service.ListHostProjectRegistrationsResponse.pb( - return_value - ) - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_host_project_registrations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListHostProjectRegistrationsPager) - assert response.next_page_token == "next_page_token_value" - - -def test_list_host_project_registrations_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.list_host_project_registrations - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.list_host_project_registrations - ] = mock_rpc - - request = {} - client.list_host_project_registrations(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_host_project_registrations(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_host_project_registrations_rest_required_fields( - request_type=host_project_registration_service.ListHostProjectRegistrationsRequest, -): - transport_class = transports.HostProjectRegistrationServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_host_project_registrations._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_host_project_registrations._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. 
- return_value = ( - host_project_registration_service.ListHostProjectRegistrationsResponse() - ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = host_project_registration_service.ListHostProjectRegistrationsResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_host_project_registrations(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_host_project_registrations_rest_unset_required_fields(): - transport = transports.HostProjectRegistrationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_host_project_registrations._get_unset_required_fields( - {} - ) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_host_project_registrations_rest_interceptors(null_interceptor): - transport = 
transports.HostProjectRegistrationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.HostProjectRegistrationServiceRestInterceptor(), - ) - client = HostProjectRegistrationServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.HostProjectRegistrationServiceRestInterceptor, - "post_list_host_project_registrations", - ) as post, mock.patch.object( - transports.HostProjectRegistrationServiceRestInterceptor, - "pre_list_host_project_registrations", - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = ( - host_project_registration_service.ListHostProjectRegistrationsRequest.pb( - host_project_registration_service.ListHostProjectRegistrationsRequest() - ) - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = host_project_registration_service.ListHostProjectRegistrationsResponse.to_json( - host_project_registration_service.ListHostProjectRegistrationsResponse() - ) - - request = ( - host_project_registration_service.ListHostProjectRegistrationsRequest() - ) - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = ( - host_project_registration_service.ListHostProjectRegistrationsResponse() - ) - - client.list_host_project_registrations( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_host_project_registrations_rest_bad_request( - transport: str = "rest", - 
request_type=host_project_registration_service.ListHostProjectRegistrationsRequest, -): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_host_project_registrations(request) - - -def test_list_host_project_registrations_rest_flattened(): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = ( - host_project_registration_service.ListHostProjectRegistrationsResponse() - ) - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = ( - host_project_registration_service.ListHostProjectRegistrationsResponse.pb( - return_value - ) - ) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_host_project_registrations(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/hostProjectRegistrations" - % client.transport._host, - args[1], - ) - - -def test_list_host_project_registrations_rest_flattened_error(transport: str = "rest"): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_host_project_registrations( - host_project_registration_service.ListHostProjectRegistrationsRequest(), - parent="parent_value", - ) - - -def test_list_host_project_registrations_rest_pager(transport: str = "rest"): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[ - host_project_registration_service.HostProjectRegistration(), - host_project_registration_service.HostProjectRegistration(), - host_project_registration_service.HostProjectRegistration(), - ], - next_page_token="abc", - ), - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[], - next_page_token="def", - ), - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[ - host_project_registration_service.HostProjectRegistration(), - ], - next_page_token="ghi", - ), - host_project_registration_service.ListHostProjectRegistrationsResponse( - host_project_registrations=[ - host_project_registration_service.HostProjectRegistration(), - host_project_registration_service.HostProjectRegistration(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - host_project_registration_service.ListHostProjectRegistrationsResponse.to_json( - x - ) - for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_host_project_registrations(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all( - isinstance(i, host_project_registration_service.HostProjectRegistration) - for i in results - ) - - pages = list( - 
client.list_host_project_registrations(request=sample_request).pages - ) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.HostProjectRegistrationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.HostProjectRegistrationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = HostProjectRegistrationServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.HostProjectRegistrationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = HostProjectRegistrationServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = HostProjectRegistrationServiceClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. 
- transport = transports.HostProjectRegistrationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = HostProjectRegistrationServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.HostProjectRegistrationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = HostProjectRegistrationServiceClient(transport=transport) - assert client.transport is transport - - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.HostProjectRegistrationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.HostProjectRegistrationServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.HostProjectRegistrationServiceGrpcTransport, - transports.HostProjectRegistrationServiceGrpcAsyncIOTransport, - transports.HostProjectRegistrationServiceRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "rest", - ], -) -def test_transport_kind(transport_name): - transport = HostProjectRegistrationServiceClient.get_transport_class( - transport_name - )( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.HostProjectRegistrationServiceGrpcTransport, - ) - - -def test_host_project_registration_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.HostProjectRegistrationServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", - ) - - -def test_host_project_registration_service_base_transport(): - # Instantiate the base transport. - with mock.patch( - "google.cloud.apihub_v1.services.host_project_registration_service.transports.HostProjectRegistrationServiceTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.HostProjectRegistrationServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - "create_host_project_registration", - "get_host_project_registration", - "list_host_project_registrations", - "get_location", - "list_locations", - "get_operation", - "cancel_operation", - "delete_operation", - "list_operations", - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - "kind", - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_host_project_registration_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.apihub_v1.services.host_project_registration_service.transports.HostProjectRegistrationServiceTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.HostProjectRegistrationServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -def test_host_project_registration_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.cloud.apihub_v1.services.host_project_registration_service.transports.HostProjectRegistrationServiceTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.HostProjectRegistrationServiceTransport() - adc.assert_called_once() - - -def test_host_project_registration_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - HostProjectRegistrationServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.HostProjectRegistrationServiceGrpcTransport, - transports.HostProjectRegistrationServiceGrpcAsyncIOTransport, - ], -) -def test_host_project_registration_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.HostProjectRegistrationServiceGrpcTransport, - transports.HostProjectRegistrationServiceGrpcAsyncIOTransport, - transports.HostProjectRegistrationServiceRestTransport, - ], -) -def test_host_project_registration_service_transport_auth_gdch_credentials( - transport_class, -): - host = "https://language.com" - api_audience_tests = [None, "https://language2.com"] - api_audience_expect = [host, "https://language2.com"] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, "default", autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock( - return_value=gdch_mock - ) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with(e) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.HostProjectRegistrationServiceGrpcTransport, grpc_helpers), - ( - transports.HostProjectRegistrationServiceGrpcAsyncIOTransport, - grpc_helpers_async, - ), - ], -) -def test_host_project_registration_service_transport_create_channel( - transport_class, grpc_helpers -): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - - create_channel.assert_called_with( - "apihub.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=["1", "2"], - default_host="apihub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.HostProjectRegistrationServiceGrpcTransport, - transports.HostProjectRegistrationServiceGrpcAsyncIOTransport, - ], -) -def test_host_project_registration_service_grpc_transport_client_cert_source_for_mtls( - transport_class, -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds, - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback, - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, private_key=expected_key - ) - - -def test_host_project_registration_service_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch( - "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" - ) as mock_configure_mtls_channel: - transports.HostProjectRegistrationServiceRestTransport( - credentials=cred, client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_host_project_registration_service_host_no_port(transport_name): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="apihub.googleapis.com" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "apihub.googleapis.com:443" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://apihub.googleapis.com" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_host_project_registration_service_host_with_port(transport_name): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="apihub.googleapis.com:8000" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "apihub.googleapis.com:8000" - if transport_name in ["grpc", "grpc_asyncio"] - 
else "https://apihub.googleapis.com:8000" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "rest", - ], -) -def test_host_project_registration_service_client_transport_session_collision( - transport_name, -): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = HostProjectRegistrationServiceClient( - credentials=creds1, - transport=transport_name, - ) - client2 = HostProjectRegistrationServiceClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.create_host_project_registration._session - session2 = client2.transport.create_host_project_registration._session - assert session1 != session2 - session1 = client1.transport.get_host_project_registration._session - session2 = client2.transport.get_host_project_registration._session - assert session1 != session2 - session1 = client1.transport.list_host_project_registrations._session - session2 = client2.transport.list_host_project_registrations._session - assert session1 != session2 - - -def test_host_project_registration_service_grpc_transport_channel(): - channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.HostProjectRegistrationServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_host_project_registration_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.HostProjectRegistrationServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize( - "transport_class", - [ - transports.HostProjectRegistrationServiceGrpcTransport, - transports.HostProjectRegistrationServiceGrpcAsyncIOTransport, - ], -) -def test_host_project_registration_service_transport_channel_mtls_with_client_cert_source( - transport_class, -): - with mock.patch( - "grpc.ssl_channel_credentials", autospec=True - ) as grpc_ssl_channel_cred: - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this 
test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize( - "transport_class", - [ - transports.HostProjectRegistrationServiceGrpcTransport, - transports.HostProjectRegistrationServiceGrpcAsyncIOTransport, - ], -) -def test_host_project_registration_service_transport_channel_mtls_with_adc( - transport_class, -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_host_project_registration_path(): - project = "squid" - location = "clam" - host_project_registration = "whelk" - expected = "projects/{project}/locations/{location}/hostProjectRegistrations/{host_project_registration}".format( - project=project, - location=location, - host_project_registration=host_project_registration, - ) - actual = HostProjectRegistrationServiceClient.host_project_registration_path( - project, location, host_project_registration - ) - assert expected == actual - - -def test_parse_host_project_registration_path(): - expected = { - "project": "octopus", - 
"location": "oyster", - "host_project_registration": "nudibranch", - } - path = HostProjectRegistrationServiceClient.host_project_registration_path( - **expected - ) - - # Check that the path construction is reversible. - actual = HostProjectRegistrationServiceClient.parse_host_project_registration_path( - path - ) - assert expected == actual - - -def test_common_billing_account_path(): - billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format( - billing_account=billing_account, - ) - actual = HostProjectRegistrationServiceClient.common_billing_account_path( - billing_account - ) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "mussel", - } - path = HostProjectRegistrationServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = HostProjectRegistrationServiceClient.parse_common_billing_account_path( - path - ) - assert expected == actual - - -def test_common_folder_path(): - folder = "winkle" - expected = "folders/{folder}".format( - folder=folder, - ) - actual = HostProjectRegistrationServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nautilus", - } - path = HostProjectRegistrationServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. 
- actual = HostProjectRegistrationServiceClient.parse_common_folder_path(path) - assert expected == actual - - -def test_common_organization_path(): - organization = "scallop" - expected = "organizations/{organization}".format( - organization=organization, - ) - actual = HostProjectRegistrationServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "abalone", - } - path = HostProjectRegistrationServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = HostProjectRegistrationServiceClient.parse_common_organization_path(path) - assert expected == actual - - -def test_common_project_path(): - project = "squid" - expected = "projects/{project}".format( - project=project, - ) - actual = HostProjectRegistrationServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "clam", - } - path = HostProjectRegistrationServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = HostProjectRegistrationServiceClient.parse_common_project_path(path) - assert expected == actual - - -def test_common_location_path(): - project = "whelk" - location = "octopus" - expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, - ) - actual = HostProjectRegistrationServiceClient.common_location_path( - project, location - ) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - } - path = HostProjectRegistrationServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = HostProjectRegistrationServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object( - transports.HostProjectRegistrationServiceTransport, "_prep_wrapped_messages" - ) as prep: - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object( - transports.HostProjectRegistrationServiceTransport, "_prep_wrapped_messages" - ) as prep: - transport_class = HostProjectRegistrationServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_get_location_rest_bad_request( - transport: str = "rest", request_type=locations_pb2.GetLocationRequest -): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_location(request) - - -@pytest.mark.parametrize( - "request_type", - [ - locations_pb2.GetLocationRequest, - dict, - ], -) -def test_get_location_rest(request_type): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.Location() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_location(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -def test_list_locations_rest_bad_request( - transport: str = "rest", request_type=locations_pb2.ListLocationsRequest -): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({"name": "projects/sample1"}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_locations(request) - - -@pytest.mark.parametrize( - "request_type", - [ - locations_pb2.ListLocationsRequest, - dict, - ], -) -def test_list_locations_rest(request_type): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.ListLocationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_locations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - - -def test_cancel_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.CancelOperationRequest -): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.CancelOperationRequest, - dict, - ], -) -def test_cancel_operation_rest(request_type): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest -): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.DeleteOperationRequest, - dict, - ], -) -def test_delete_operation_rest(request_type): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_get_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.GetOperationRequest -): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.GetOperationRequest, - dict, - ], -) -def test_get_operation_rest(request_type): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_list_operations_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.ListOperationsRequest -): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_operations(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.ListOperationsRequest, - dict, - ], -) -def test_list_operations_rest(request_type): - client = HostProjectRegistrationServiceClient( + # It is an error to provide a credentials file and a transport instance. + transport = transports.HostProjectRegistrationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + with pytest.raises(ValueError): + client = HostProjectRegistrationServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.HostProjectRegistrationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = HostProjectRegistrationServiceClient( + client_options=options, + transport=transport, + ) - response = client.list_operations(request) + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = HostProjectRegistrationServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) + # It is an error to provide scopes and a transport instance. + transport = transports.HostProjectRegistrationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = HostProjectRegistrationServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) -def test_delete_operation(transport: str = "grpc"): - client = HostProjectRegistrationServiceClient( +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.HostProjectRegistrationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) + client = HostProjectRegistrationServiceClient(transport=transport) + assert client.transport is transport - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the response is the type that we expect. - assert response is None +@pytest.mark.parametrize( + "transport_class", + [ + transports.HostProjectRegistrationServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = HostProjectRegistrationServiceAsyncClient( +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_transport_kind(transport_name): + transport = HostProjectRegistrationServiceClient.get_transport_class( + transport_name + )( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) + assert transport.kind == transport_name - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_host_project_registration_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.HostProjectRegistrationServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) - # Establish that the response is the type that we expect. - assert response is None +def test_host_project_registration_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.apihub_v1.services.host_project_registration_service.transports.HostProjectRegistrationServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.HostProjectRegistrationServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) -def test_delete_operation_field_headers(): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "create_host_project_registration", + "get_host_project_registration", + "list_host_project_registrations", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + with pytest.raises(NotImplementedError): + transport.close() + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = operations_pb2.DeleteOperationRequest() - request.name = "locations" +def test_host_project_registration_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.apihub_v1.services.host_project_registration_service.transports.HostProjectRegistrationServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.HostProjectRegistrationServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] +def test_host_project_registration_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.apihub_v1.services.host_project_registration_service.transports.HostProjectRegistrationServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.HostProjectRegistrationServiceTransport() + adc.assert_called_once() -def test_delete_operation_from_dict(): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } +def test_host_project_registration_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + HostProjectRegistrationServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation( - request={ - "name": "locations", - } +def test_host_project_registration_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.HostProjectRegistrationServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback ) - call.assert_called() + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) -def test_cancel_operation(transport: str = "grpc"): +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_host_project_registration_service_host_no_port(transport_name): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com" + ), + transport=transport_name, ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert response is None - - -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + assert client.transport._host == ( + "apihub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com" ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_cancel_operation_field_headers(): +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_host_project_registration_service_host_with_port(transport_name): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "apihub.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com:8000" ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_host_project_registration_service_client_transport_session_collision( + transport_name, +): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = HostProjectRegistrationServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = HostProjectRegistrationServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_host_project_registration._session + session2 = client2.transport.create_host_project_registration._session + assert session1 != session2 + session1 = client1.transport.get_host_project_registration._session + session2 = client2.transport.get_host_project_registration._session + assert session1 != session2 + session1 = client1.transport.list_host_project_registrations._session + session2 = client2.transport.list_host_project_registrations._session + assert session1 != session2 -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_host_project_registration_path(): + project = "squid" + location = "clam" + host_project_registration = "whelk" + expected = 
"projects/{project}/locations/{location}/hostProjectRegistrations/{host_project_registration}".format( + project=project, + location=location, + host_project_registration=host_project_registration, + ) + actual = HostProjectRegistrationServiceClient.host_project_registration_path( + project, location, host_project_registration ) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_host_project_registration_path(): + expected = { + "project": "octopus", + "location": "oyster", + "host_project_registration": "nudibranch", + } + path = HostProjectRegistrationServiceClient.host_project_registration_path( + **expected + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = HostProjectRegistrationServiceClient.parse_host_project_registration_path( + path + ) + assert expected == actual -def test_cancel_operation_from_dict(): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() + actual = HostProjectRegistrationServiceClient.common_billing_account_path( + billing_account + ) + assert expected == actual -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = HostProjectRegistrationServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = HostProjectRegistrationServiceClient.parse_common_billing_account_path( + path ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() + assert expected == actual -def test_get_operation(transport: str = "grpc"): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format( + folder=folder, ) + actual = HostProjectRegistrationServiceClient.common_folder_path(folder) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = HostProjectRegistrationServiceClient.common_folder_path(**expected) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) + # Check that the path construction is reversible. + actual = HostProjectRegistrationServiceClient.parse_common_folder_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format( + organization=organization, ) + actual = HostProjectRegistrationServiceClient.common_organization_path(organization) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = HostProjectRegistrationServiceClient.common_organization_path(**expected) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) + # Check that the path construction is reversible. + actual = HostProjectRegistrationServiceClient.parse_common_organization_path(path) + assert expected == actual -def test_get_operation_field_headers(): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, ) + actual = HostProjectRegistrationServiceClient.common_project_path(project) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = HostProjectRegistrationServiceClient.common_project_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. 
+ actual = HostProjectRegistrationServiceClient.parse_common_project_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, ) + actual = HostProjectRegistrationServiceClient.common_location_path( + project, location + ) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = HostProjectRegistrationServiceClient.common_location_path(**expected) + # Check that the path construction is reversible. + actual = HostProjectRegistrationServiceClient.parse_common_location_path(path) + assert expected == actual -def test_get_operation_from_dict(): - client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() + with mock.patch.object( + transports.HostProjectRegistrationServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) - response = await client.get_operation( - request={ - "name": "locations", - } + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.HostProjectRegistrationServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = HostProjectRegistrationServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) - call.assert_called() + prep.assert_called_once_with(client_info) -def test_list_operations(transport: str = "grpc"): +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are 
mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - - -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.ListOperationsResponse) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) -def test_list_operations_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.get_location(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) -def test_list_operations_from_dict(): +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) -def test_list_locations(transport: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, locations_pb2.ListLocationsResponse) -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc_asyncio"): - client = HostProjectRegistrationServiceAsyncClient( +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) -def test_list_locations_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() - - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.cancel_operation(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Establish that the response is the type that we expect. 
+ assert response is None -def test_list_locations_from_dict(): +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) -def test_get_location(transport: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_operation(request) # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) + assert response is None -@pytest.mark.asyncio -async def test_get_location_async(transport: str = "grpc_asyncio"): - client = HostProjectRegistrationServiceAsyncClient( +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) -def test_get_location_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): client = HostProjectRegistrationServiceClient( - credentials=ga_credentials.AnonymousCredentials() + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = locations_pb2.Location() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - client.get_location(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.get_operation(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations/abc", - ) in kw["metadata"] + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_location_field_headers_async(): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials() +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = HostProjectRegistrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations/abc", - ) in kw["metadata"] + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) -def test_get_location_from_dict(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): client = HostProjectRegistrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - - response = client.get_location( - request={ - "name": "locations/abc", - } - ) - call.assert_called() + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -@pytest.mark.asyncio -async def test_get_location_from_dict_async(): - client = HostProjectRegistrationServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location( - request={ - "name": "locations", - } - ) - call.assert_called() + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) def test_transport_close(): transports = { "rest": "_session", - "grpc": "_grpc_channel", } for transport, close_name in transports.items(): @@ -5853,7 +3022,6 @@ def test_transport_close(): def test_client_ctx(): transports = [ "rest", - "grpc", ] for transport in transports: client = HostProjectRegistrationServiceClient( @@ -5872,11 +3040,7 @@ def test_client_ctx(): [ ( HostProjectRegistrationServiceClient, - transports.HostProjectRegistrationServiceGrpcTransport, - ), - ( - HostProjectRegistrationServiceAsyncClient, - transports.HostProjectRegistrationServiceGrpcAsyncIOTransport, + transports.HostProjectRegistrationServiceRestTransport, ), ], ) diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_linting_service.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_linting_service.py index 947a54eb86f7..db139191d3f8 100644 --- a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_linting_service.py +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_linting_service.py @@ -47,7 +47,6 @@ from requests.sessions import Session from google.cloud.apihub_v1.services.linting_service import ( - LintingServiceAsyncClient, LintingServiceClient, transports, ) @@ -211,11 +210,6 @@ def test__get_client_cert_source(): "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(LintingServiceClient), ) -@mock.patch.object( - LintingServiceAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - 
modify_default_endpoint_template(LintingServiceAsyncClient), -) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() @@ -300,7 +294,6 @@ def test__get_universe_domain(): @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (LintingServiceClient, transports.LintingServiceGrpcTransport, "grpc"), (LintingServiceClient, transports.LintingServiceRestTransport, "rest"), ], ) @@ -380,8 +373,6 @@ def test__validate_universe_domain(client_class, transport_class, transport_name @pytest.mark.parametrize( "client_class,transport_name", [ - (LintingServiceClient, "grpc"), - (LintingServiceAsyncClient, "grpc_asyncio"), (LintingServiceClient, "rest"), ], ) @@ -406,8 +397,6 @@ def test_linting_service_client_from_service_account_info(client_class, transpor @pytest.mark.parametrize( "transport_class,transport_name", [ - (transports.LintingServiceGrpcTransport, "grpc"), - (transports.LintingServiceGrpcAsyncIOTransport, "grpc_asyncio"), (transports.LintingServiceRestTransport, "rest"), ], ) @@ -432,8 +421,6 @@ def test_linting_service_client_service_account_always_use_jwt( @pytest.mark.parametrize( "client_class,transport_name", [ - (LintingServiceClient, "grpc"), - (LintingServiceAsyncClient, "grpc_asyncio"), (LintingServiceClient, "rest"), ], ) @@ -465,24 +452,17 @@ def test_linting_service_client_from_service_account_file(client_class, transpor def test_linting_service_client_get_transport_class(): transport = LintingServiceClient.get_transport_class() available_transports = [ - transports.LintingServiceGrpcTransport, transports.LintingServiceRestTransport, ] assert transport in available_transports - transport = LintingServiceClient.get_transport_class("grpc") - assert transport == transports.LintingServiceGrpcTransport + transport = LintingServiceClient.get_transport_class("rest") + assert transport == transports.LintingServiceRestTransport @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - 
(LintingServiceClient, transports.LintingServiceGrpcTransport, "grpc"), - ( - LintingServiceAsyncClient, - transports.LintingServiceGrpcAsyncIOTransport, - "grpc_asyncio", - ), (LintingServiceClient, transports.LintingServiceRestTransport, "rest"), ], ) @@ -491,11 +471,6 @@ def test_linting_service_client_get_transport_class(): "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(LintingServiceClient), ) -@mock.patch.object( - LintingServiceAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(LintingServiceAsyncClient), -) def test_linting_service_client_client_options( client_class, transport_class, transport_name ): @@ -629,20 +604,6 @@ def test_linting_service_client_client_options( @pytest.mark.parametrize( "client_class,transport_class,transport_name,use_client_cert_env", [ - (LintingServiceClient, transports.LintingServiceGrpcTransport, "grpc", "true"), - ( - LintingServiceAsyncClient, - transports.LintingServiceGrpcAsyncIOTransport, - "grpc_asyncio", - "true", - ), - (LintingServiceClient, transports.LintingServiceGrpcTransport, "grpc", "false"), - ( - LintingServiceAsyncClient, - transports.LintingServiceGrpcAsyncIOTransport, - "grpc_asyncio", - "false", - ), (LintingServiceClient, transports.LintingServiceRestTransport, "rest", "true"), (LintingServiceClient, transports.LintingServiceRestTransport, "rest", "false"), ], @@ -652,11 +613,6 @@ def test_linting_service_client_client_options( "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(LintingServiceClient), ) -@mock.patch.object( - LintingServiceAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(LintingServiceAsyncClient), -) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_linting_service_client_mtls_env_auto( client_class, transport_class, transport_name, use_client_cert_env @@ -760,19 +716,12 @@ def test_linting_service_client_mtls_env_auto( ) -@pytest.mark.parametrize( - "client_class", 
[LintingServiceClient, LintingServiceAsyncClient] -) +@pytest.mark.parametrize("client_class", [LintingServiceClient]) @mock.patch.object( LintingServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LintingServiceClient), ) -@mock.patch.object( - LintingServiceAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(LintingServiceAsyncClient), -) def test_linting_service_client_get_mtls_endpoint_and_cert_source(client_class): mock_client_cert_source = mock.Mock() @@ -864,19 +813,12 @@ def test_linting_service_client_get_mtls_endpoint_and_cert_source(client_class): ) -@pytest.mark.parametrize( - "client_class", [LintingServiceClient, LintingServiceAsyncClient] -) +@pytest.mark.parametrize("client_class", [LintingServiceClient]) @mock.patch.object( LintingServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(LintingServiceClient), ) -@mock.patch.object( - LintingServiceAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(LintingServiceAsyncClient), -) def test_linting_service_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback api_override = "foo.com" @@ -953,12 +895,6 @@ def test_linting_service_client_client_api_endpoint(client_class): @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (LintingServiceClient, transports.LintingServiceGrpcTransport, "grpc"), - ( - LintingServiceAsyncClient, - transports.LintingServiceGrpcAsyncIOTransport, - "grpc_asyncio", - ), (LintingServiceClient, transports.LintingServiceRestTransport, "rest"), ], ) @@ -990,18 +926,6 @@ def test_linting_service_client_client_options_scopes( @pytest.mark.parametrize( "client_class,transport_class,transport_name,grpc_helpers", [ - ( - LintingServiceClient, - transports.LintingServiceGrpcTransport, - "grpc", - grpc_helpers, - ), - ( - LintingServiceAsyncClient, - transports.LintingServiceGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), 
(LintingServiceClient, transports.LintingServiceRestTransport, "rest", None), ], ) @@ -1029,96 +953,6 @@ def test_linting_service_client_client_options_credentials_file( ) -def test_linting_service_client_client_options_from_dict(): - with mock.patch( - "google.cloud.apihub_v1.services.linting_service.transports.LintingServiceGrpcTransport.__init__" - ) as grpc_transport: - grpc_transport.return_value = None - client = LintingServiceClient( - client_options={"api_endpoint": "squid.clam.whelk"} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name,grpc_helpers", - [ - ( - LintingServiceClient, - transports.LintingServiceGrpcTransport, - "grpc", - grpc_helpers, - ), - ( - LintingServiceAsyncClient, - transports.LintingServiceGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), - ], -) -def test_linting_service_client_create_channel_credentials_file( - client_class, transport_class, transport_name, grpc_helpers -): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions(credentials_file="credentials.json") - - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "apihub.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=None, - default_host="apihub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - @pytest.mark.parametrize( "request_type", [ @@ -1126,30 +960,36 @@ def test_linting_service_client_create_channel_credentials_file( dict, ], ) -def test_get_style_guide(request_type, transport: str = "grpc"): +def test_get_style_guide_rest(request_type): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # 
Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" + } + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_style_guide), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = linting_service.StyleGuide( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = linting_service.StyleGuide( name="name_value", linter=common_fields.Linter.SPECTRAL, ) - response = client.get_style_guide(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = linting_service.GetStyleGuideRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = linting_service.StyleGuide.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_style_guide(request) # Establish that the response is the type that we expect. assert isinstance(response, linting_service.StyleGuide) @@ -1157,60 +997,13 @@ def test_get_style_guide(request_type, transport: str = "grpc"): assert response.linter == common_fields.Linter.SPECTRAL -def test_get_style_guide_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_style_guide), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_style_guide() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == linting_service.GetStyleGuideRequest() - - -def test_get_style_guide_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = linting_service.GetStyleGuideRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_style_guide), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.get_style_guide(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == linting_service.GetStyleGuideRequest( - name="name_value", - ) - - -def test_get_style_guide_use_cached_wrapped_rpc(): +def test_get_style_guide_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1226,6 +1019,7 @@ def test_get_style_guide_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[client._transport.get_style_guide] = mock_rpc + request = {} client.get_style_guide(request) @@ -1239,256 +1033,237 @@ def test_get_style_guide_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_style_guide_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_style_guide), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - linting_service.StyleGuide( - name="name_value", - linter=common_fields.Linter.SPECTRAL, - ) - ) - response = await client.get_style_guide() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == linting_service.GetStyleGuideRequest() - - -@pytest.mark.asyncio -async def test_get_style_guide_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_get_style_guide_rest_required_fields( + request_type=linting_service.GetStyleGuideRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.get_style_guide - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_style_guide - ] = mock_rpc - - request = {} - await client.get_style_guide(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.get_style_guide(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - + transport_class = transports.LintingServiceRestTransport -@pytest.mark.asyncio -async def test_get_style_guide_async( - transport: str = "grpc_asyncio", request_type=linting_service.GetStyleGuideRequest -): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_style_guide), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - linting_service.StyleGuide( - name="name_value", - linter=common_fields.Linter.SPECTRAL, - ) - ) - response = await client.get_style_guide(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = linting_service.GetStyleGuideRequest() - assert args[0] == request + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_style_guide._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the response is the type that we expect. 
- assert isinstance(response, linting_service.StyleGuide) - assert response.name == "name_value" - assert response.linter == common_fields.Linter.SPECTRAL + # verify required fields with default values are now present + jsonified_request["name"] = "name_value" -@pytest.mark.asyncio -async def test_get_style_guide_async_from_dict(): - await test_get_style_guide_async(request_type=dict) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_style_guide._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -def test_get_style_guide_field_headers(): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request = request_type(**request_init) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = linting_service.GetStyleGuideRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_style_guide), "__call__") as call: - call.return_value = linting_service.StyleGuide() - client.get_style_guide(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + # Designate an appropriate value for the returned response. + return_value = linting_service.StyleGuide() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = linting_service.StyleGuide.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) -@pytest.mark.asyncio -async def test_get_style_guide_field_headers_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = linting_service.GetStyleGuideRequest() + response = client.get_style_guide(request) - request.name = "name_value" + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_style_guide), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - linting_service.StyleGuide() - ) - await client.get_style_guide(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_get_style_guide_rest_unset_required_fields(): + transport = transports.LintingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.get_style_guide._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_get_style_guide_flattened(): - client = LintingServiceClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_style_guide_rest_interceptors(null_interceptor): + transport = transports.LintingServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.LintingServiceRestInterceptor(), ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_style_guide), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = linting_service.StyleGuide() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.get_style_guide( - name="name_value", + client = LintingServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.LintingServiceRestInterceptor, "post_get_style_guide" + ) as post, mock.patch.object( + transports.LintingServiceRestInterceptor, "pre_get_style_guide" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = linting_service.GetStyleGuideRequest.pb( + linting_service.GetStyleGuideRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = linting_service.StyleGuide.to_json( + linting_service.StyleGuide() ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + request = linting_service.GetStyleGuideRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = linting_service.StyleGuide() + + client.get_style_guide( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() -def test_get_style_guide_flattened_error(): +def test_get_style_guide_rest_bad_request( + transport: str = "rest", request_type=linting_service.GetStyleGuideRequest +): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_style_guide( - linting_service.GetStyleGuideRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_style_guide(request) -@pytest.mark.asyncio -async def test_get_style_guide_flattened_async(): - client = LintingServiceAsyncClient( + +def test_get_style_guide_rest_flattened(): + client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_style_guide), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = linting_service.StyleGuide() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = linting_service.StyleGuide() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" + } - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - linting_service.StyleGuide() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.get_style_guide( + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = linting_service.StyleGuide.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_style_guide(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/plugins/*/styleGuide}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_get_style_guide_flattened_error_async(): - client = LintingServiceAsyncClient( +def test_get_style_guide_rest_flattened_error(transport: str = "rest"): + client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_style_guide( + client.get_style_guide( linting_service.GetStyleGuideRequest(), name="name_value", ) +def test_get_style_guide_rest_error(): + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -1496,93 +1271,124 @@ async def test_get_style_guide_flattened_error_async(): dict, ], ) -def test_update_style_guide(request_type, transport: str = "grpc"): +def test_update_style_guide_rest(request_type): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "style_guide": { + "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" + } + } + request_init["style_guide"] = { + "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide", + "linter": 1, + "contents": {"contents": b"contents_blob", "mime_type": "mime_type_value"}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_style_guide), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = linting_service.StyleGuide( - name="name_value", - linter=common_fields.Linter.SPECTRAL, - ) - response = client.update_style_guide(request) + # Determine if the message type is proto-plus or protobuf + test_field = linting_service.UpdateStyleGuideRequest.meta.fields["style_guide"] - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = linting_service.UpdateStyleGuideRequest() - assert args[0] == request + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] - # Establish that the response is the type that we expect. - assert isinstance(response, linting_service.StyleGuide) - assert response.name == "name_value" - assert response.linter == common_fields.Linter.SPECTRAL + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields -def test_update_style_guide_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_style_guide), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_style_guide() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == linting_service.UpdateStyleGuideRequest() + subfields_not_in_runtime = [] + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["style_guide"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value -def test_update_style_guide_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = linting_service.UpdateStyleGuideRequest() + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["style_guide"][field])): + del request_init["style_guide"][field][i][subfield] + else: + del request_init["style_guide"][field][subfield] + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_style_guide), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = linting_service.StyleGuide( + name="name_value", + linter=common_fields.Linter.SPECTRAL, ) - client.update_style_guide(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == linting_service.UpdateStyleGuideRequest() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = linting_service.StyleGuide.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_style_guide(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, linting_service.StyleGuide) + assert response.name == "name_value" + assert response.linter == common_fields.Linter.SPECTRAL -def test_update_style_guide_use_cached_wrapped_rpc(): +def test_update_style_guide_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1602,6 +1408,7 @@ def test_update_style_guide_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.update_style_guide ] = mock_rpc + request = {} client.update_style_guide(request) @@ -1615,216 +1422,223 @@ def test_update_style_guide_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_style_guide_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", +def test_update_style_guide_rest_required_fields( + request_type=linting_service.UpdateStyleGuideRequest, +): + transport_class = transports.LintingServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_style_guide), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - linting_service.StyleGuide( - name="name_value", - linter=common_fields.Linter.SPECTRAL, - ) - ) - response = await client.update_style_guide() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == linting_service.UpdateStyleGuideRequest() + # verify fields with default values are dropped + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_style_guide._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) -@pytest.mark.asyncio -async def test_update_style_guide_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # verify required fields with default values are now present - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_style_guide._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) - # Ensure method has been cached - assert ( - client._client._transport.update_style_guide - in client._client._transport._wrapped_methods - ) + # verify required fields with non-default values are left alone - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.update_style_guide - ] = mock_rpc + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) - request = {} - await client.update_style_guide(request) + # Designate an appropriate value for the returned response. + return_value = linting_service.StyleGuide() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 + response_value = Response() + response_value.status_code = 200 - await client.update_style_guide(request) + # Convert return value to protobuf type + return_value = linting_service.StyleGuide.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_style_guide(request) -@pytest.mark.asyncio -async def test_update_style_guide_async( - transport: str = "grpc_asyncio", - request_type=linting_service.UpdateStyleGuideRequest, -): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_style_guide_rest_unset_required_fields(): + transport = transports.LintingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + unset_fields = transport.update_style_guide._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("styleGuide",))) - # Mock the actual call within the gRPC stub, and fake the request. 
+ +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_style_guide_rest_interceptors(null_interceptor): + transport = transports.LintingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.LintingServiceRestInterceptor(), + ) + client = LintingServiceClient(transport=transport) with mock.patch.object( - type(client.transport.update_style_guide), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - linting_service.StyleGuide( - name="name_value", - linter=common_fields.Linter.SPECTRAL, - ) + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.LintingServiceRestInterceptor, "post_update_style_guide" + ) as post, mock.patch.object( + transports.LintingServiceRestInterceptor, "pre_update_style_guide" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = linting_service.UpdateStyleGuideRequest.pb( + linting_service.UpdateStyleGuideRequest() ) - response = await client.update_style_guide(request) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = linting_service.UpdateStyleGuideRequest() - assert args[0] == request + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = linting_service.StyleGuide.to_json( + linting_service.StyleGuide() + ) - # Establish that the response is the type that we expect. 
- assert isinstance(response, linting_service.StyleGuide) - assert response.name == "name_value" - assert response.linter == common_fields.Linter.SPECTRAL + request = linting_service.UpdateStyleGuideRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = linting_service.StyleGuide() + client.update_style_guide( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) -@pytest.mark.asyncio -async def test_update_style_guide_async_from_dict(): - await test_update_style_guide_async(request_type=dict) + pre.assert_called_once() + post.assert_called_once() -def test_update_style_guide_field_headers(): +def test_update_style_guide_rest_bad_request( + transport: str = "rest", request_type=linting_service.UpdateStyleGuideRequest +): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = linting_service.UpdateStyleGuideRequest() - - request.style_guide.name = "name_value" + # send a request that will satisfy transcoding + request_init = { + "style_guide": { + "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" + } + } + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_style_guide), "__call__" - ) as call: - call.return_value = linting_service.StyleGuide() + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value client.update_style_guide(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "style_guide.name=name_value", - ) in kw["metadata"] - -@pytest.mark.asyncio -async def test_update_style_guide_field_headers_async(): - client = LintingServiceAsyncClient( +def test_update_style_guide_rest_flattened(): + client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = linting_service.UpdateStyleGuideRequest() - - request.style_guide.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_style_guide), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - linting_service.StyleGuide() - ) - await client.update_style_guide(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "style_guide.name=name_value", - ) in kw["metadata"] - + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = linting_service.StyleGuide() -def test_update_style_guide_flattened(): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # get arguments that satisfy an http rule for this method + sample_request = { + "style_guide": { + "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" + } + } - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_style_guide), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = linting_service.StyleGuide() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_style_guide( + # get truthy value for each flattened field + mock_args = dict( style_guide=linting_service.StyleGuide(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = linting_service.StyleGuide.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_style_guide(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].style_guide - mock_val = linting_service.StyleGuide(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{style_guide.name=projects/*/locations/*/plugins/*/styleGuide}" + % client.transport._host, + args[1], + ) -def test_update_style_guide_flattened_error(): +def test_update_style_guide_rest_flattened_error(transport: str = "rest"): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1837,56 +1651,11 @@ def test_update_style_guide_flattened_error(): ) -@pytest.mark.asyncio -async def test_update_style_guide_flattened_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_style_guide), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = linting_service.StyleGuide() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - linting_service.StyleGuide() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_style_guide( - style_guide=linting_service.StyleGuide(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].style_guide - mock_val = linting_service.StyleGuide(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_update_style_guide_flattened_error_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_update_style_guide_rest_error(): + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_style_guide( - linting_service.UpdateStyleGuideRequest(), - style_guide=linting_service.StyleGuide(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - @pytest.mark.parametrize( "request_type", @@ -1895,97 +1664,50 @@ async def test_update_style_guide_flattened_error_async(): dict, ], ) -def test_get_style_guide_contents(request_type, transport: str = "grpc"): +def test_get_style_guide_contents_rest(request_type): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" + } + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_style_guide_contents), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = linting_service.StyleGuideContents( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = linting_service.StyleGuideContents( contents=b"contents_blob", mime_type="mime_type_value", ) - response = client.get_style_guide_contents(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = linting_service.GetStyleGuideContentsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = linting_service.StyleGuideContents.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_style_guide_contents(request) + + # Establish that the response is the type that we expect. assert isinstance(response, linting_service.StyleGuideContents) assert response.contents == b"contents_blob" assert response.mime_type == "mime_type_value" -def test_get_style_guide_contents_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_style_guide_contents), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.get_style_guide_contents() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == linting_service.GetStyleGuideContentsRequest() - - -def test_get_style_guide_contents_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = linting_service.GetStyleGuideContentsRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_style_guide_contents), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.get_style_guide_contents(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == linting_service.GetStyleGuideContentsRequest( - name="name_value", - ) - - -def test_get_style_guide_contents_use_cached_wrapped_rpc(): +def test_get_style_guide_contents_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -2006,6 +1728,7 @@ def test_get_style_guide_contents_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.get_style_guide_contents ] = mock_rpc + request = {} client.get_style_guide_contents(request) @@ -2019,544 +1742,245 @@ def test_get_style_guide_contents_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_style_guide_contents_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_style_guide_contents), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - linting_service.StyleGuideContents( - contents=b"contents_blob", - mime_type="mime_type_value", - ) - ) - response = await client.get_style_guide_contents() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == linting_service.GetStyleGuideContentsRequest() - - -@pytest.mark.asyncio -async def test_get_style_guide_contents_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_get_style_guide_contents_rest_required_fields( + request_type=linting_service.GetStyleGuideContentsRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.LintingServiceRestTransport - # Ensure method has been cached - assert ( - client._client._transport.get_style_guide_contents - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_style_guide_contents - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.get_style_guide_contents(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_style_guide_contents._get_unset_required_fields(jsonified_request) + 
jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.get_style_guide_contents(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_style_guide_contents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_get_style_guide_contents_async( - transport: str = "grpc_asyncio", - request_type=linting_service.GetStyleGuideContentsRequest, -): - client = LintingServiceAsyncClient( + client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = linting_service.StyleGuideContents() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_style_guide_contents), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - linting_service.StyleGuideContents( - contents=b"contents_blob", - mime_type="mime_type_value", - ) - ) - response = await client.get_style_guide_contents(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = linting_service.GetStyleGuideContentsRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = linting_service.StyleGuideContents.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, linting_service.StyleGuideContents) - assert response.contents == b"contents_blob" - assert response.mime_type == "mime_type_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_style_guide_contents(request) -@pytest.mark.asyncio -async def test_get_style_guide_contents_async_from_dict(): - await test_get_style_guide_contents_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_get_style_guide_contents_field_headers(): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_style_guide_contents_rest_unset_required_fields(): + transport = transports.LintingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = linting_service.GetStyleGuideContentsRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_style_guide_contents), "__call__" - ) as call: - call.return_value = linting_service.StyleGuideContents() - client.get_style_guide_contents(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.get_style_guide_contents._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.asyncio -async def test_get_style_guide_contents_field_headers_async(): - client = LintingServiceAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_style_guide_contents_rest_interceptors(null_interceptor): + transport = transports.LintingServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.LintingServiceRestInterceptor(), ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = linting_service.GetStyleGuideContentsRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. + client = LintingServiceClient(transport=transport) with mock.patch.object( - type(client.transport.get_style_guide_contents), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - linting_service.StyleGuideContents() + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.LintingServiceRestInterceptor, "post_get_style_guide_contents" + ) as post, mock.patch.object( + transports.LintingServiceRestInterceptor, "pre_get_style_guide_contents" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = linting_service.GetStyleGuideContentsRequest.pb( + linting_service.GetStyleGuideContentsRequest() ) - await client.get_style_guide_contents(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = linting_service.StyleGuideContents.to_json( + linting_service.StyleGuideContents() + ) -def test_get_style_guide_contents_flattened(): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = linting_service.GetStyleGuideContentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = linting_service.StyleGuideContents() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_style_guide_contents), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = linting_service.StyleGuideContents() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.get_style_guide_contents( - name="name_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_get_style_guide_contents_flattened_error(): +def test_get_style_guide_contents_rest_bad_request( + transport: str = "rest", request_type=linting_service.GetStyleGuideContentsRequest +): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_style_guide_contents( - linting_service.GetStyleGuideContentsRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_style_guide_contents(request) -@pytest.mark.asyncio -async def test_get_style_guide_contents_flattened_async(): - client = LintingServiceAsyncClient( + +def test_get_style_guide_contents_rest_flattened(): + client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_style_guide_contents), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = linting_service.StyleGuideContents() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = linting_service.StyleGuideContents() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - linting_service.StyleGuideContents() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_style_guide_contents( + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" + } + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = linting_service.StyleGuideContents.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -@pytest.mark.asyncio -async def test_get_style_guide_contents_flattened_error_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client.get_style_guide_contents(**mock_args) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_style_guide_contents( - linting_service.GetStyleGuideContentsRequest(), - name="name_value", + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/plugins/*/styleGuide}:contents" + % client.transport._host, + args[1], ) -@pytest.mark.parametrize( - "request_type", - [ - linting_service.LintSpecRequest, - dict, - ], -) -def test_lint_spec(request_type, transport: str = "grpc"): +def test_get_style_guide_contents_rest_flattened_error(transport: str = "rest"): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.lint_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.lint_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = linting_service.LintSpecRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_lint_spec_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.lint_spec), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.lint_spec() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == linting_service.LintSpecRequest() - - -def test_lint_spec_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = linting_service.LintSpecRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.lint_spec), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.lint_spec(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == linting_service.LintSpecRequest( + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_style_guide_contents( + linting_service.GetStyleGuideContentsRequest(), name="name_value", ) -def test_lint_spec_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.lint_spec in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.lint_spec] = mock_rpc - request = {} - client.lint_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.lint_spec(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_lint_spec_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.lint_spec), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.lint_spec() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == linting_service.LintSpecRequest() - - -@pytest.mark.asyncio -async def test_lint_spec_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.lint_spec - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.lint_spec - ] = mock_rpc - - request = {} - await client.lint_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.lint_spec(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_lint_spec_async( - transport: str = "grpc_asyncio", request_type=linting_service.LintSpecRequest -): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.lint_spec), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.lint_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = linting_service.LintSpecRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_lint_spec_async_from_dict(): - await test_lint_spec_async(request_type=dict) - - -def test_lint_spec_field_headers(): +def test_get_style_guide_contents_rest_error(): client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = linting_service.LintSpecRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.lint_spec), "__call__") as call: - call.return_value = None - client.lint_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_lint_spec_field_headers_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = linting_service.LintSpecRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.lint_spec), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.lint_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - @pytest.mark.parametrize( "request_type", [ - linting_service.GetStyleGuideRequest, + linting_service.LintSpecRequest, dict, ], ) -def test_get_style_guide_rest(request_type): +def test_lint_spec_rest(request_type): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -2564,36 +1988,29 @@ def test_get_style_guide_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = linting_service.StyleGuide( - name="name_value", - linter=common_fields.Linter.SPECTRAL, - ) + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = linting_service.StyleGuide.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_style_guide(request) + response = client.lint_spec(request) # Establish that the response is the type that we expect. - assert isinstance(response, linting_service.StyleGuide) - assert response.name == "name_value" - assert response.linter == common_fields.Linter.SPECTRAL + assert response is None -def test_get_style_guide_rest_use_cached_wrapped_rpc(): +def test_lint_spec_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -2607,31 +2024,29 @@ def test_get_style_guide_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_style_guide in client._transport._wrapped_methods + assert client._transport.lint_spec in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_style_guide] = mock_rpc + client._transport._wrapped_methods[client._transport.lint_spec] = mock_rpc request = {} - client.get_style_guide(request) + client.lint_spec(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_style_guide(request) + client.lint_spec(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_style_guide_rest_required_fields( - request_type=linting_service.GetStyleGuideRequest, -): +def test_lint_spec_rest_required_fields(request_type=linting_service.LintSpecRequest): transport_class = transports.LintingServiceRestTransport request_init = {} @@ -2646,7 +2061,7 @@ def test_get_style_guide_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_style_guide._get_unset_required_fields(jsonified_request) + ).lint_spec._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -2655,7 +2070,7 @@ def test_get_style_guide_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_style_guide._get_unset_required_fields(jsonified_request) + ).lint_spec._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -2669,7 +2084,7 @@ def test_get_style_guide_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = linting_service.StyleGuide() + return_value = None # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -2681,39 +2096,37 @@ def test_get_style_guide_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = linting_service.StyleGuide.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_style_guide(request) + response = client.lint_spec(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_style_guide_rest_unset_required_fields(): +def test_lint_spec_rest_unset_required_fields(): transport = transports.LintingServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_style_guide._get_unset_required_fields({}) + unset_fields = transport.lint_spec._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_style_guide_rest_interceptors(null_interceptor): +def test_lint_spec_rest_interceptors(null_interceptor): transport = transports.LintingServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -2726,14 +2139,11 @@ def test_get_style_guide_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.LintingServiceRestInterceptor, "post_get_style_guide" - ) as post, mock.patch.object( - 
transports.LintingServiceRestInterceptor, "pre_get_style_guide" + transports.LintingServiceRestInterceptor, "pre_lint_spec" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = linting_service.GetStyleGuideRequest.pb( - linting_service.GetStyleGuideRequest() + pb_message = linting_service.LintSpecRequest.pb( + linting_service.LintSpecRequest() ) transcode.return_value = { "method": "post", @@ -2745,19 +2155,15 @@ def test_get_style_guide_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = linting_service.StyleGuide.to_json( - linting_service.StyleGuide() - ) - request = linting_service.GetStyleGuideRequest() + request = linting_service.LintSpecRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = linting_service.StyleGuide() - client.get_style_guide( + client.lint_spec( request, metadata=[ ("key", "val"), @@ -2766,11 +2172,10 @@ def test_get_style_guide_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_get_style_guide_rest_bad_request( - transport: str = "rest", request_type=linting_service.GetStyleGuideRequest +def test_lint_spec_rest_bad_request( + transport: str = "rest", request_type=linting_service.LintSpecRequest ): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2779,7 +2184,7 @@ def test_get_style_guide_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" + "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" } request = request_type(**request_init) @@ -2792,2969 +2197,815 @@ def test_get_style_guide_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value 
- client.get_style_guide(request) + client.lint_spec(request) -def test_get_style_guide_rest_flattened(): +def test_lint_spec_rest_error(): client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = linting_service.StyleGuide() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = linting_service.StyleGuide.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_style_guide(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/plugins/*/styleGuide}" - % client.transport._host, - args[1], - ) - -def test_get_style_guide_rest_flattened_error(transport: str = "rest"): - client = LintingServiceClient( +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.LintingServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. with pytest.raises(ValueError): - client.get_style_guide( - linting_service.GetStyleGuideRequest(), - name="name_value", + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - -def test_get_style_guide_rest_error(): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - linting_service.UpdateStyleGuideRequest, - dict, - ], -) -def test_update_style_guide_rest(request_type): - client = LintingServiceClient( + # It is an error to provide a credentials file and a transport instance. + transport = transports.LintingServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - - # send a request that will satisfy transcoding - request_init = { - "style_guide": { - "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" - } - } - request_init["style_guide"] = { - "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide", - "linter": 1, - "contents": {"contents": b"contents_blob", "mime_type": "mime_type_value"}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = linting_service.UpdateStyleGuideRequest.meta.fields["style_guide"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["style_guide"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["style_guide"][field])): - del request_init["style_guide"][field][i][subfield] - else: - del 
request_init["style_guide"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = linting_service.StyleGuide( - name="name_value", - linter=common_fields.Linter.SPECTRAL, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = linting_service.StyleGuide.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.update_style_guide(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, linting_service.StyleGuide) - assert response.name == "name_value" - assert response.linter == common_fields.Linter.SPECTRAL - - -def test_update_style_guide_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + with pytest.raises(ValueError): client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.update_style_guide in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
+ client_options={"credentials_file": "credentials.json"}, + transport=transport, ) - client._transport._wrapped_methods[ - client._transport.update_style_guide - ] = mock_rpc - - request = {} - client.update_style_guide(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_style_guide(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_style_guide_rest_required_fields( - request_type=linting_service.UpdateStyleGuideRequest, -): - transport_class = transports.LintingServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_style_guide._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_style_guide._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - client = LintingServiceClient( + # It is an error to provide an api_key and a transport instance. + transport = transports.LintingServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. 
- return_value = linting_service.StyleGuide() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "patch", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = linting_service.StyleGuide.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.update_style_guide(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_update_style_guide_rest_unset_required_fields(): - transport = transports.LintingServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.update_style_guide._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask",)) & set(("styleGuide",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_style_guide_rest_interceptors(null_interceptor): - transport = transports.LintingServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.LintingServiceRestInterceptor(), - ) - client = 
LintingServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.LintingServiceRestInterceptor, "post_update_style_guide" - ) as post, mock.patch.object( - transports.LintingServiceRestInterceptor, "pre_update_style_guide" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = linting_service.UpdateStyleGuideRequest.pb( - linting_service.UpdateStyleGuideRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = linting_service.StyleGuide.to_json( - linting_service.StyleGuide() - ) - - request = linting_service.UpdateStyleGuideRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = linting_service.StyleGuide() - - client.update_style_guide( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_style_guide_rest_bad_request( - transport: str = "rest", request_type=linting_service.UpdateStyleGuideRequest -): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "style_guide": { - "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" - } - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_style_guide(request) - - -def test_update_style_guide_rest_flattened(): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = linting_service.StyleGuide() - - # get arguments that satisfy an http rule for this method - sample_request = { - "style_guide": { - "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" - } - } - - # get truthy value for each flattened field - mock_args = dict( - style_guide=linting_service.StyleGuide(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = linting_service.StyleGuide.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.update_style_guide(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{style_guide.name=projects/*/locations/*/plugins/*/styleGuide}" - % client.transport._host, - args[1], - ) - - -def test_update_style_guide_rest_flattened_error(transport: str = "rest"): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. + options = client_options.ClientOptions() + options.api_key = "api_key" with pytest.raises(ValueError): - client.update_style_guide( - linting_service.UpdateStyleGuideRequest(), - style_guide=linting_service.StyleGuide(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - -def test_update_style_guide_rest_error(): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - linting_service.GetStyleGuideContentsRequest, - dict, - ], -) -def test_get_style_guide_contents_rest(request_type): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = linting_service.StyleGuideContents( - contents=b"contents_blob", - mime_type="mime_type_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = linting_service.StyleGuideContents.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_style_guide_contents(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, linting_service.StyleGuideContents) - assert response.contents == b"contents_blob" - assert response.mime_type == "mime_type_value" - - -def test_get_style_guide_contents_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.get_style_guide_contents - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.get_style_guide_contents - ] = mock_rpc - - request = {} - client.get_style_guide_contents(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_style_guide_contents(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_style_guide_contents_rest_required_fields( - request_type=linting_service.GetStyleGuideContentsRequest, -): - transport_class = transports.LintingServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_style_guide_contents._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_style_guide_contents._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = linting_service.StyleGuideContents() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = linting_service.StyleGuideContents.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_style_guide_contents(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_style_guide_contents_rest_unset_required_fields(): - transport = transports.LintingServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_style_guide_contents._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_style_guide_contents_rest_interceptors(null_interceptor): - transport = transports.LintingServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.LintingServiceRestInterceptor(), - ) - client = LintingServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.LintingServiceRestInterceptor, "post_get_style_guide_contents" - ) as post, mock.patch.object( - transports.LintingServiceRestInterceptor, "pre_get_style_guide_contents" - ) as pre: - 
pre.assert_not_called() - post.assert_not_called() - pb_message = linting_service.GetStyleGuideContentsRequest.pb( - linting_service.GetStyleGuideContentsRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = linting_service.StyleGuideContents.to_json( - linting_service.StyleGuideContents() - ) - - request = linting_service.GetStyleGuideContentsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = linting_service.StyleGuideContents() - - client.get_style_guide_contents( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_style_guide_contents_rest_bad_request( - transport: str = "rest", request_type=linting_service.GetStyleGuideContentsRequest -): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_style_guide_contents(request) - - -def test_get_style_guide_contents_rest_flattened(): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = linting_service.StyleGuideContents() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/plugins/sample3/styleGuide" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = linting_service.StyleGuideContents.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_style_guide_contents(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/plugins/*/styleGuide}:contents" - % client.transport._host, - args[1], - ) - - -def test_get_style_guide_contents_rest_flattened_error(transport: str = "rest"): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_style_guide_contents( - linting_service.GetStyleGuideContentsRequest(), - name="name_value", - ) - - -def test_get_style_guide_contents_rest_error(): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - linting_service.LintSpecRequest, - dict, - ], -) -def test_lint_spec_rest(request_type): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.lint_spec(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_lint_spec_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.lint_spec in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.lint_spec] = mock_rpc - - request = {} - client.lint_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.lint_spec(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_lint_spec_rest_required_fields(request_type=linting_service.LintSpecRequest): - transport_class = transports.LintingServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).lint_spec._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - 
credentials=ga_credentials.AnonymousCredentials() - ).lint_spec._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.lint_spec(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_lint_spec_rest_unset_required_fields(): - transport = transports.LintingServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.lint_spec._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_lint_spec_rest_interceptors(null_interceptor): - transport = transports.LintingServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.LintingServiceRestInterceptor(), - ) - client = LintingServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.LintingServiceRestInterceptor, "pre_lint_spec" - ) as pre: - pre.assert_not_called() - pb_message = linting_service.LintSpecRequest.pb( - linting_service.LintSpecRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = linting_service.LintSpecRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - 
pre.return_value = request, metadata - - client.lint_spec( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - - -def test_lint_spec_rest_bad_request( - transport: str = "rest", request_type=linting_service.LintSpecRequest -): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apis/sample3/versions/sample4/specs/sample5" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.lint_spec(request) - - -def test_lint_spec_rest_error(): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.LintingServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.LintingServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = LintingServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. 
- transport = transports.LintingServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = LintingServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = LintingServiceClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.LintingServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = LintingServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.LintingServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = LintingServiceClient(transport=transport) - assert client.transport is transport - - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.LintingServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.LintingServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.LintingServiceGrpcTransport, - transports.LintingServiceGrpcAsyncIOTransport, - transports.LintingServiceRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "rest", - ], -) -def test_transport_kind(transport_name): - transport = LintingServiceClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.LintingServiceGrpcTransport, - ) - - -def test_linting_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.LintingServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", - ) - - -def test_linting_service_base_transport(): - # Instantiate the base transport. - with mock.patch( - "google.cloud.apihub_v1.services.linting_service.transports.LintingServiceTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.LintingServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - "get_style_guide", - "update_style_guide", - "get_style_guide_contents", - "lint_spec", - "get_location", - "list_locations", - "get_operation", - "cancel_operation", - "delete_operation", - "list_operations", - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - "kind", - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_linting_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.apihub_v1.services.linting_service.transports.LintingServiceTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.LintingServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -def test_linting_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.cloud.apihub_v1.services.linting_service.transports.LintingServiceTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.LintingServiceTransport() - adc.assert_called_once() - - -def test_linting_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - LintingServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.LintingServiceGrpcTransport, - transports.LintingServiceGrpcAsyncIOTransport, - ], -) -def test_linting_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.LintingServiceGrpcTransport, - transports.LintingServiceGrpcAsyncIOTransport, - transports.LintingServiceRestTransport, - ], -) -def test_linting_service_transport_auth_gdch_credentials(transport_class): - host = "https://language.com" - api_audience_tests = [None, "https://language2.com"] - api_audience_expect = [host, "https://language2.com"] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, "default", autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock( - return_value=gdch_mock - ) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with(e) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.LintingServiceGrpcTransport, grpc_helpers), - (transports.LintingServiceGrpcAsyncIOTransport, grpc_helpers_async), - ], -) -def test_linting_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - - create_channel.assert_called_with( - "apihub.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=["1", "2"], - default_host="apihub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.LintingServiceGrpcTransport, - transports.LintingServiceGrpcAsyncIOTransport, - ], -) -def test_linting_service_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds, - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback, - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, private_key=expected_key - ) - - -def test_linting_service_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch( - "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" - ) as mock_configure_mtls_channel: - transports.LintingServiceRestTransport( - credentials=cred, client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_linting_service_host_no_port(transport_name): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="apihub.googleapis.com" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "apihub.googleapis.com:443" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://apihub.googleapis.com" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_linting_service_host_with_port(transport_name): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="apihub.googleapis.com:8000" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "apihub.googleapis.com:8000" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://apihub.googleapis.com:8000" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ 
- "rest", - ], -) -def test_linting_service_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = LintingServiceClient( - credentials=creds1, - transport=transport_name, - ) - client2 = LintingServiceClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.get_style_guide._session - session2 = client2.transport.get_style_guide._session - assert session1 != session2 - session1 = client1.transport.update_style_guide._session - session2 = client2.transport.update_style_guide._session - assert session1 != session2 - session1 = client1.transport.get_style_guide_contents._session - session2 = client2.transport.get_style_guide_contents._session - assert session1 != session2 - session1 = client1.transport.lint_spec._session - session2 = client2.transport.lint_spec._session - assert session1 != session2 - - -def test_linting_service_grpc_transport_channel(): - channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.LintingServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_linting_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.LintingServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize( - "transport_class", - [ - transports.LintingServiceGrpcTransport, - transports.LintingServiceGrpcAsyncIOTransport, - ], -) -def test_linting_service_transport_channel_mtls_with_client_cert_source( - transport_class, -): - with mock.patch( - "grpc.ssl_channel_credentials", autospec=True - ) as grpc_ssl_channel_cred: - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize( - "transport_class", - [ - transports.LintingServiceGrpcTransport, - transports.LintingServiceGrpcAsyncIOTransport, - ], -) -def test_linting_service_transport_channel_mtls_with_adc(transport_class): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_spec_path(): - project = "squid" - location = "clam" - api = "whelk" - version = "octopus" - spec = "oyster" - expected = "projects/{project}/locations/{location}/apis/{api}/versions/{version}/specs/{spec}".format( - project=project, - location=location, - api=api, - version=version, - spec=spec, - ) - actual = LintingServiceClient.spec_path(project, location, api, version, spec) - assert expected == actual - - -def test_parse_spec_path(): - expected = { - "project": "nudibranch", - "location": "cuttlefish", - "api": "mussel", - "version": "winkle", - "spec": "nautilus", - } - path = LintingServiceClient.spec_path(**expected) - - # Check that the path construction is reversible. 
- actual = LintingServiceClient.parse_spec_path(path) - assert expected == actual - - -def test_style_guide_path(): - project = "scallop" - location = "abalone" - plugin = "squid" - expected = ( - "projects/{project}/locations/{location}/plugins/{plugin}/styleGuide".format( - project=project, - location=location, - plugin=plugin, - ) - ) - actual = LintingServiceClient.style_guide_path(project, location, plugin) - assert expected == actual - - -def test_parse_style_guide_path(): - expected = { - "project": "clam", - "location": "whelk", - "plugin": "octopus", - } - path = LintingServiceClient.style_guide_path(**expected) - - # Check that the path construction is reversible. - actual = LintingServiceClient.parse_style_guide_path(path) - assert expected == actual - - -def test_common_billing_account_path(): - billing_account = "oyster" - expected = "billingAccounts/{billing_account}".format( - billing_account=billing_account, - ) - actual = LintingServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "nudibranch", - } - path = LintingServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = LintingServiceClient.parse_common_billing_account_path(path) - assert expected == actual - - -def test_common_folder_path(): - folder = "cuttlefish" - expected = "folders/{folder}".format( - folder=folder, - ) - actual = LintingServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "mussel", - } - path = LintingServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. 
- actual = LintingServiceClient.parse_common_folder_path(path) - assert expected == actual - - -def test_common_organization_path(): - organization = "winkle" - expected = "organizations/{organization}".format( - organization=organization, - ) - actual = LintingServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nautilus", - } - path = LintingServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = LintingServiceClient.parse_common_organization_path(path) - assert expected == actual - - -def test_common_project_path(): - project = "scallop" - expected = "projects/{project}".format( - project=project, - ) - actual = LintingServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "abalone", - } - path = LintingServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = LintingServiceClient.parse_common_project_path(path) - assert expected == actual - - -def test_common_location_path(): - project = "squid" - location = "clam" - expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, - ) - actual = LintingServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "whelk", - "location": "octopus", - } - path = LintingServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = LintingServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object( - transports.LintingServiceTransport, "_prep_wrapped_messages" - ) as prep: - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object( - transports.LintingServiceTransport, "_prep_wrapped_messages" - ) as prep: - transport_class = LintingServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_get_location_rest_bad_request( - transport: str = "rest", request_type=locations_pb2.GetLocationRequest -): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_location(request) - - -@pytest.mark.parametrize( - "request_type", - [ - locations_pb2.GetLocationRequest, - dict, - ], -) -def test_get_location_rest(request_type): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.Location() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_location(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -def test_list_locations_rest_bad_request( - transport: str = "rest", request_type=locations_pb2.ListLocationsRequest -): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({"name": "projects/sample1"}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_locations(request) - - -@pytest.mark.parametrize( - "request_type", - [ - locations_pb2.ListLocationsRequest, - dict, - ], -) -def test_list_locations_rest(request_type): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.ListLocationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_locations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - - -def test_cancel_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.CancelOperationRequest -): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.CancelOperationRequest, - dict, - ], -) -def test_cancel_operation_rest(request_type): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest -): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.DeleteOperationRequest, - dict, - ], -) -def test_delete_operation_rest(request_type): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_get_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.GetOperationRequest -): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.GetOperationRequest, - dict, - ], -) -def test_get_operation_rest(request_type): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_list_operations_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.ListOperationsRequest -): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_operations(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.ListOperationsRequest, - dict, - ], -) -def test_list_operations_rest(request_type): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + client_options=options, + transport=transport, + ) - response = client.list_operations(request) + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = LintingServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) + # It is an error to provide scopes and a transport instance. 
+ transport = transports.LintingServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LintingServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) -def test_delete_operation(transport: str = "grpc"): - client = LintingServiceClient( +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.LintingServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) + client = LintingServiceClient(transport=transport) + assert client.transport is transport - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the response is the type that we expect. - assert response is None +@pytest.mark.parametrize( + "transport_class", + [ + transports.LintingServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = LintingServiceAsyncClient( +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_transport_kind(transport_name): + transport = LintingServiceClient.get_transport_class(transport_name)( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) + assert transport.kind == transport_name - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_linting_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.LintingServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) - # Establish that the response is the type that we expect. - assert response is None +def test_linting_service_base_transport(): + # Instantiate the base transport. 
+ with mock.patch( + "google.cloud.apihub_v1.services.linting_service.transports.LintingServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.LintingServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) -def test_delete_operation_field_headers(): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "get_style_guide", + "update_style_guide", + "get_style_guide_contents", + "lint_spec", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + with pytest.raises(NotImplementedError): + transport.close() + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" +def test_linting_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.apihub_v1.services.linting_service.transports.LintingServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.LintingServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] +def test_linting_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.apihub_v1.services.linting_service.transports.LintingServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.LintingServiceTransport() + adc.assert_called_once() -def test_delete_operation_from_dict(): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } +def test_linting_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + LintingServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation( - request={ - "name": "locations", - } +def test_linting_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.LintingServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback ) - call.assert_called() + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) -def test_cancel_operation(transport: str = "grpc"): +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_linting_service_host_no_port(transport_name): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "apihub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com" ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = LintingServiceAsyncClient( +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_linting_service_host_with_port(transport_name): + client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "apihub.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com:8000" ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the response is the type that we expect. 
- assert response is None +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_linting_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = LintingServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = LintingServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_style_guide._session + session2 = client2.transport.get_style_guide._session + assert session1 != session2 + session1 = client1.transport.update_style_guide._session + session2 = client2.transport.update_style_guide._session + assert session1 != session2 + session1 = client1.transport.get_style_guide_contents._session + session2 = client2.transport.get_style_guide_contents._session + assert session1 != session2 + session1 = client1.transport.lint_spec._session + session2 = client2.transport.lint_spec._session + assert session1 != session2 -def test_cancel_operation_field_headers(): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_spec_path(): + project = "squid" + location = "clam" + api = "whelk" + version = "octopus" + spec = "oyster" + expected = "projects/{project}/locations/{location}/apis/{api}/versions/{version}/specs/{spec}".format( + project=project, + location=location, + api=api, + version=version, + spec=spec, ) + actual = LintingServiceClient.spec_path(project, location, api, version, spec) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_spec_path(): + expected = { + "project": "nudibranch", + "location": "cuttlefish", + "api": "mussel", + "version": "winkle", + "spec": "nautilus", + } + path = LintingServiceClient.spec_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = LintingServiceClient.parse_spec_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_style_guide_path(): + project = "scallop" + location = "abalone" + plugin = "squid" + expected = ( + "projects/{project}/locations/{location}/plugins/{plugin}/styleGuide".format( + project=project, + location=location, + plugin=plugin, + ) ) + actual = LintingServiceClient.style_guide_path(project, location, plugin) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_style_guide_path(): + expected = { + "project": "clam", + "location": "whelk", + "plugin": "octopus", + } + path = LintingServiceClient.style_guide_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = LintingServiceClient.parse_style_guide_path(path) + assert expected == actual -def test_cancel_operation_from_dict(): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() + actual = LintingServiceClient.common_billing_account_path(billing_account) + assert expected == actual -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = LintingServiceClient.common_billing_account_path(**expected) + # Check that the path construction is reversible. + actual = LintingServiceClient.parse_common_billing_account_path(path) + assert expected == actual -def test_get_operation(transport: str = "grpc"): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format( + folder=folder, ) + actual = LintingServiceClient.common_folder_path(folder) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = LintingServiceClient.common_folder_path(**expected) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) + # Check that the path construction is reversible. 
+ actual = LintingServiceClient.parse_common_folder_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format( + organization=organization, ) + actual = LintingServiceClient.common_organization_path(organization) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = LintingServiceClient.common_organization_path(**expected) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) + # Check that the path construction is reversible. 
+ actual = LintingServiceClient.parse_common_organization_path(path) + assert expected == actual -def test_get_operation_field_headers(): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format( + project=project, ) + actual = LintingServiceClient.common_project_path(project) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = LintingServiceClient.common_project_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = LintingServiceClient.parse_common_project_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, ) + actual = LintingServiceClient.common_location_path(project, location) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. 
Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = LintingServiceClient.common_location_path(**expected) + # Check that the path construction is reversible. + actual = LintingServiceClient.parse_common_location_path(path) + assert expected == actual -def test_get_operation_from_dict(): - client = LintingServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() + with mock.patch.object( + transports.LintingServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) - response = await client.get_operation( - request={ - "name": "locations", - } + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.LintingServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = LintingServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) - call.assert_called() + prep.assert_called_once_with(client_info) -def test_list_operations(transport: str = "grpc"): +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.ListOperationsResponse) - - -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) -def test_list_operations_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.get_location(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) -def test_list_operations_from_dict(): +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) -def test_list_locations(transport: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, locations_pb2.ListLocationsResponse) -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc_asyncio"): - client = LintingServiceAsyncClient( +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) -def test_list_locations_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() - - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.cancel_operation(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Establish that the response is the type that we expect. 
+ assert response is None -def test_list_locations_from_dict(): +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) -def test_get_location(transport: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.delete_operation(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, locations_pb2.Location) + assert response is None -@pytest.mark.asyncio -async def test_get_location_async(transport: str = "grpc_asyncio"): - client = LintingServiceAsyncClient( +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) -def test_get_location_field_headers(): - client = LintingServiceClient(credentials=ga_credentials.AnonymousCredentials()) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = locations_pb2.Location() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - client.get_location(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.get_operation(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations/abc", - ) in kw["metadata"] + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_location_field_headers_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials() +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = LintingServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations/abc", - ) in kw["metadata"] + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) -def test_get_location_from_dict(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): client = LintingServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - - response = client.get_location( - request={ - "name": "locations/abc", - } - ) - call.assert_called() + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) -@pytest.mark.asyncio -async def test_get_location_from_dict_async(): - client = LintingServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location( - request={ - "name": "locations", - } - ) - call.assert_called() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) def test_transport_close(): transports = { "rest": "_session", - "grpc": "_grpc_channel", } for transport, close_name in transports.items(): @@ -5772,7 +3023,6 @@ def test_transport_close(): def test_client_ctx(): transports = [ "rest", - "grpc", ] for transport in transports: client = LintingServiceClient( @@ -5789,8 +3039,7 @@ def test_client_ctx(): @pytest.mark.parametrize( "client_class,transport_class", [ - (LintingServiceClient, transports.LintingServiceGrpcTransport), - (LintingServiceAsyncClient, transports.LintingServiceGrpcAsyncIOTransport), + (LintingServiceClient, transports.LintingServiceRestTransport), ], ) def test_api_key_credentials(client_class, transport_class): diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_provisioning.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_provisioning.py index f3cf6227c307..06f68007eef1 100644 --- a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_provisioning.py +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_provisioning.py @@ -55,11 +55,7 @@ from requests import PreparedRequest, Request, Response from requests.sessions import Session -from google.cloud.apihub_v1.services.provisioning import ( - ProvisioningAsyncClient, - ProvisioningClient, - transports, -) +from google.cloud.apihub_v1.services.provisioning import ProvisioningClient, transports from google.cloud.apihub_v1.types import common_fields, provisioning_service @@ -205,11 +201,6 @@ def 
test__get_client_cert_source(): "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ProvisioningClient), ) -@mock.patch.object( - ProvisioningAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ProvisioningAsyncClient), -) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() @@ -294,7 +285,6 @@ def test__get_universe_domain(): @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (ProvisioningClient, transports.ProvisioningGrpcTransport, "grpc"), (ProvisioningClient, transports.ProvisioningRestTransport, "rest"), ], ) @@ -374,8 +364,6 @@ def test__validate_universe_domain(client_class, transport_class, transport_name @pytest.mark.parametrize( "client_class,transport_name", [ - (ProvisioningClient, "grpc"), - (ProvisioningAsyncClient, "grpc_asyncio"), (ProvisioningClient, "rest"), ], ) @@ -400,8 +388,6 @@ def test_provisioning_client_from_service_account_info(client_class, transport_n @pytest.mark.parametrize( "transport_class,transport_name", [ - (transports.ProvisioningGrpcTransport, "grpc"), - (transports.ProvisioningGrpcAsyncIOTransport, "grpc_asyncio"), (transports.ProvisioningRestTransport, "rest"), ], ) @@ -426,8 +412,6 @@ def test_provisioning_client_service_account_always_use_jwt( @pytest.mark.parametrize( "client_class,transport_name", [ - (ProvisioningClient, "grpc"), - (ProvisioningAsyncClient, "grpc_asyncio"), (ProvisioningClient, "rest"), ], ) @@ -459,24 +443,17 @@ def test_provisioning_client_from_service_account_file(client_class, transport_n def test_provisioning_client_get_transport_class(): transport = ProvisioningClient.get_transport_class() available_transports = [ - transports.ProvisioningGrpcTransport, transports.ProvisioningRestTransport, ] assert transport in available_transports - transport = ProvisioningClient.get_transport_class("grpc") - assert transport == transports.ProvisioningGrpcTransport + transport = 
ProvisioningClient.get_transport_class("rest") + assert transport == transports.ProvisioningRestTransport @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (ProvisioningClient, transports.ProvisioningGrpcTransport, "grpc"), - ( - ProvisioningAsyncClient, - transports.ProvisioningGrpcAsyncIOTransport, - "grpc_asyncio", - ), (ProvisioningClient, transports.ProvisioningRestTransport, "rest"), ], ) @@ -485,11 +462,6 @@ def test_provisioning_client_get_transport_class(): "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ProvisioningClient), ) -@mock.patch.object( - ProvisioningAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ProvisioningAsyncClient), -) def test_provisioning_client_client_options( client_class, transport_class, transport_name ): @@ -623,20 +595,6 @@ def test_provisioning_client_client_options( @pytest.mark.parametrize( "client_class,transport_class,transport_name,use_client_cert_env", [ - (ProvisioningClient, transports.ProvisioningGrpcTransport, "grpc", "true"), - ( - ProvisioningAsyncClient, - transports.ProvisioningGrpcAsyncIOTransport, - "grpc_asyncio", - "true", - ), - (ProvisioningClient, transports.ProvisioningGrpcTransport, "grpc", "false"), - ( - ProvisioningAsyncClient, - transports.ProvisioningGrpcAsyncIOTransport, - "grpc_asyncio", - "false", - ), (ProvisioningClient, transports.ProvisioningRestTransport, "rest", "true"), (ProvisioningClient, transports.ProvisioningRestTransport, "rest", "false"), ], @@ -646,11 +604,6 @@ def test_provisioning_client_client_options( "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ProvisioningClient), ) -@mock.patch.object( - ProvisioningAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ProvisioningAsyncClient), -) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_provisioning_client_mtls_env_auto( client_class, transport_class, transport_name, use_client_cert_env @@ 
-754,15 +707,10 @@ def test_provisioning_client_mtls_env_auto( ) -@pytest.mark.parametrize("client_class", [ProvisioningClient, ProvisioningAsyncClient]) +@pytest.mark.parametrize("client_class", [ProvisioningClient]) @mock.patch.object( ProvisioningClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ProvisioningClient) ) -@mock.patch.object( - ProvisioningAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(ProvisioningAsyncClient), -) def test_provisioning_client_get_mtls_endpoint_and_cert_source(client_class): mock_client_cert_source = mock.Mock() @@ -854,17 +802,12 @@ def test_provisioning_client_get_mtls_endpoint_and_cert_source(client_class): ) -@pytest.mark.parametrize("client_class", [ProvisioningClient, ProvisioningAsyncClient]) +@pytest.mark.parametrize("client_class", [ProvisioningClient]) @mock.patch.object( ProvisioningClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ProvisioningClient), ) -@mock.patch.object( - ProvisioningAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(ProvisioningAsyncClient), -) def test_provisioning_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback api_override = "foo.com" @@ -941,12 +884,6 @@ def test_provisioning_client_client_api_endpoint(client_class): @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - (ProvisioningClient, transports.ProvisioningGrpcTransport, "grpc"), - ( - ProvisioningAsyncClient, - transports.ProvisioningGrpcAsyncIOTransport, - "grpc_asyncio", - ), (ProvisioningClient, transports.ProvisioningRestTransport, "rest"), ], ) @@ -978,18 +915,6 @@ def test_provisioning_client_client_options_scopes( @pytest.mark.parametrize( "client_class,transport_class,transport_name,grpc_helpers", [ - ( - ProvisioningClient, - transports.ProvisioningGrpcTransport, - "grpc", - grpc_helpers, - ), - ( - ProvisioningAsyncClient, - transports.ProvisioningGrpcAsyncIOTransport, - "grpc_asyncio", - 
grpc_helpers_async, - ), (ProvisioningClient, transports.ProvisioningRestTransport, "rest", None), ], ) @@ -1017,94 +942,6 @@ def test_provisioning_client_client_options_credentials_file( ) -def test_provisioning_client_client_options_from_dict(): - with mock.patch( - "google.cloud.apihub_v1.services.provisioning.transports.ProvisioningGrpcTransport.__init__" - ) as grpc_transport: - grpc_transport.return_value = None - client = ProvisioningClient(client_options={"api_endpoint": "squid.clam.whelk"}) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name,grpc_helpers", - [ - ( - ProvisioningClient, - transports.ProvisioningGrpcTransport, - "grpc", - grpc_helpers, - ), - ( - ProvisioningAsyncClient, - transports.ProvisioningGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), - ], -) -def test_provisioning_client_create_channel_credentials_file( - client_class, transport_class, transport_name, grpc_helpers -): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions(credentials_file="credentials.json") - - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "apihub.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=None, - default_host="apihub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - @pytest.mark.parametrize( "request_type", [ @@ -1112,94 +949,120 @@ def test_provisioning_client_create_channel_credentials_file( dict, ], ) -def test_create_api_hub_instance(request_type, transport: str = "grpc"): +def test_create_api_hub_instance_rest(request_type): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + 
transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["api_hub_instance"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "state": 1, + "state_message": "state_message_value", + "config": {"cmek_key_name": "cmek_key_name_value"}, + "labels": {}, + "description": "description_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_api_hub_instance), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_api_hub_instance(request) + # Determine if the message type is proto-plus or protobuf + test_field = provisioning_service.CreateApiHubInstanceRequest.meta.fields[ + "api_hub_instance" + ] - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = provisioning_service.CreateApiHubInstanceRequest() - assert args[0] == request + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] - # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields -def test_create_api_hub_instance_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_api_hub_instance), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.create_api_hub_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == provisioning_service.CreateApiHubInstanceRequest() + subfields_not_in_runtime = [] + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["api_hub_instance"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value -def test_create_api_hub_instance_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = provisioning_service.CreateApiHubInstanceRequest( - parent="parent_value", - api_hub_instance_id="api_hub_instance_id_value", - ) + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["api_hub_instance"][field])): + del request_init["api_hub_instance"][field][i][subfield] + else: + del request_init["api_hub_instance"][field][subfield] + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_api_hub_instance), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_api_hub_instance(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == provisioning_service.CreateApiHubInstanceRequest( - parent="parent_value", - api_hub_instance_id="api_hub_instance_id_value", - ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_api_hub_instance(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" -def test_create_api_hub_instance_use_cached_wrapped_rpc(): +def test_create_api_hub_instance_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1220,15 +1083,15 @@ def test_create_api_hub_instance_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.create_api_hub_instance ] = mock_rpc + request = {} client.create_api_hub_instance(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_api_hub_instance(request) @@ -1238,279 +1101,233 @@ def test_create_api_hub_instance_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_api_hub_instance_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_api_hub_instance), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.create_api_hub_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == provisioning_service.CreateApiHubInstanceRequest() - - -@pytest.mark.asyncio -async def test_create_api_hub_instance_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_create_api_hub_instance_rest_required_fields( + request_type=provisioning_service.CreateApiHubInstanceRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.create_api_hub_instance - in client._client._transport._wrapped_methods - ) + transport_class = transports.ProvisioningRestTransport - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.create_api_hub_instance - ] = mock_rpc + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + 
json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - request = {} - await client.create_api_hub_instance(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_api_hub_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() + # verify required fields with default values are now present - await client.create_api_hub_instance(request) + jsonified_request["parent"] = "parent_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_api_hub_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("api_hub_instance_id",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -@pytest.mark.asyncio -async def test_create_api_hub_instance_async( - transport: str = "grpc_asyncio", - request_type=provisioning_service.CreateApiHubInstanceRequest, -): - client = ProvisioningAsyncClient( + client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_api_hub_instance), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.create_api_hub_instance(request) + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = provisioning_service.CreateApiHubInstanceRequest() - assert args[0] == request + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_api_hub_instance(request) -@pytest.mark.asyncio -async def test_create_api_hub_instance_async_from_dict(): - await test_create_api_hub_instance_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_create_api_hub_instance_field_headers(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_create_api_hub_instance_rest_unset_required_fields(): + transport = transports.ProvisioningRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = provisioning_service.CreateApiHubInstanceRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_api_hub_instance), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_api_hub_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + unset_fields = transport.create_api_hub_instance._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("apiHubInstanceId",)) + & set( + ( + "parent", + "apiHubInstance", + ) + ) + ) -@pytest.mark.asyncio -async def test_create_api_hub_instance_field_headers_async(): - client = ProvisioningAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_api_hub_instance_rest_interceptors(null_interceptor): + transport = transports.ProvisioningRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ProvisioningRestInterceptor(), ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = provisioning_service.CreateApiHubInstanceRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
+ client = ProvisioningClient(transport=transport) with mock.patch.object( - type(client.transport.create_api_hub_instance), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ProvisioningRestInterceptor, "post_create_api_hub_instance" + ) as post, mock.patch.object( + transports.ProvisioningRestInterceptor, "pre_create_api_hub_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = provisioning_service.CreateApiHubInstanceRequest.pb( + provisioning_service.CreateApiHubInstanceRequest() ) - await client.create_api_hub_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) -def test_create_api_hub_instance_flattened(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = provisioning_service.CreateApiHubInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_api_hub_instance), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.create_api_hub_instance( - parent="parent_value", - api_hub_instance=common_fields.ApiHubInstance(name="name_value"), - api_hub_instance_id="api_hub_instance_id_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].api_hub_instance - mock_val = common_fields.ApiHubInstance(name="name_value") - assert arg == mock_val - arg = args[0].api_hub_instance_id - mock_val = "api_hub_instance_id_value" - assert arg == mock_val - - -def test_create_api_hub_instance_flattened_error(): + pre.assert_called_once() + post.assert_called_once() + + +def test_create_api_hub_instance_rest_bad_request( + transport: str = "rest", + request_type=provisioning_service.CreateApiHubInstanceRequest, +): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_api_hub_instance( - provisioning_service.CreateApiHubInstanceRequest(), - parent="parent_value", - api_hub_instance=common_fields.ApiHubInstance(name="name_value"), - api_hub_instance_id="api_hub_instance_id_value", - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_api_hub_instance(request) -@pytest.mark.asyncio -async def test_create_api_hub_instance_flattened_async(): - client = ProvisioningAsyncClient( +def test_create_api_hub_instance_rest_flattened(): + client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_api_hub_instance), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.create_api_hub_instance( + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", api_hub_instance=common_fields.ApiHubInstance(name="name_value"), api_hub_instance_id="api_hub_instance_id_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_api_hub_instance(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].api_hub_instance - mock_val = common_fields.ApiHubInstance(name="name_value") - assert arg == mock_val - arg = args[0].api_hub_instance_id - mock_val = "api_hub_instance_id_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_create_api_hub_instance_flattened_error_async(): - client = ProvisioningAsyncClient( + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/apiHubInstances" + % client.transport._host, + args[1], + ) + + +def test_create_api_hub_instance_rest_flattened_error(transport: str = "rest"): + client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.create_api_hub_instance( + client.create_api_hub_instance( provisioning_service.CreateApiHubInstanceRequest(), parent="parent_value", api_hub_instance=common_fields.ApiHubInstance(name="name_value"), @@ -1518,6 +1335,12 @@ async def test_create_api_hub_instance_flattened_error_async(): ) +def test_create_api_hub_instance_rest_error(): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -1525,34 +1348,38 @@ async def test_create_api_hub_instance_flattened_error_async(): dict, ], ) -def test_get_api_hub_instance(request_type, transport: str = "grpc"): +def test_get_api_hub_instance_rest(request_type): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apiHubInstances/sample3" + } + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_hub_instance), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.ApiHubInstance( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = common_fields.ApiHubInstance( name="name_value", state=common_fields.ApiHubInstance.State.INACTIVE, state_message="state_message_value", description="description_value", ) - response = client.get_api_hub_instance(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = provisioning_service.GetApiHubInstanceRequest() - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.ApiHubInstance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_api_hub_instance(request) # Establish that the response is the type that we expect. assert isinstance(response, common_fields.ApiHubInstance) @@ -1562,64 +1389,13 @@ def test_get_api_hub_instance(request_type, transport: str = "grpc"): assert response.description == "description_value" -def test_get_api_hub_instance_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_hub_instance), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.get_api_hub_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == provisioning_service.GetApiHubInstanceRequest() - - -def test_get_api_hub_instance_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = provisioning_service.GetApiHubInstanceRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_hub_instance), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.get_api_hub_instance(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == provisioning_service.GetApiHubInstanceRequest( - name="name_value", - ) - - -def test_get_api_hub_instance_use_cached_wrapped_rpc(): +def test_get_api_hub_instance_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1639,6 +1415,7 @@ def test_get_api_hub_instance_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.get_api_hub_instance ] = mock_rpc + request = {} client.get_api_hub_instance(request) @@ -1652,275 +1429,237 @@ def test_get_api_hub_instance_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_api_hub_instance_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", +def test_get_api_hub_instance_rest_required_fields( + request_type=provisioning_service.GetApiHubInstanceRequest, +): + transport_class = transports.ProvisioningRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_hub_instance), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ApiHubInstance( - name="name_value", - state=common_fields.ApiHubInstance.State.INACTIVE, - state_message="state_message_value", - description="description_value", - ) - ) - response = await client.get_api_hub_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == provisioning_service.GetApiHubInstanceRequest() - - -@pytest.mark.asyncio -async def test_get_api_hub_instance_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.get_api_hub_instance - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_api_hub_instance - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.get_api_hub_instance(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_api_hub_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.get_api_hub_instance(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_api_hub_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_get_api_hub_instance_async( - transport: str = "grpc_asyncio", - request_type=provisioning_service.GetApiHubInstanceRequest, -): - client = ProvisioningAsyncClient( + client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = common_fields.ApiHubInstance() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_hub_instance), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ApiHubInstance( - name="name_value", - state=common_fields.ApiHubInstance.State.INACTIVE, - state_message="state_message_value", - description="description_value", - ) - ) - response = await client.get_api_hub_instance(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = provisioning_service.GetApiHubInstanceRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = common_fields.ApiHubInstance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.ApiHubInstance) - assert response.name == "name_value" - assert response.state == common_fields.ApiHubInstance.State.INACTIVE - assert response.state_message == "state_message_value" - assert response.description == "description_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_api_hub_instance(request) -@pytest.mark.asyncio -async def test_get_api_hub_instance_async_from_dict(): - await test_get_api_hub_instance_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_get_api_hub_instance_field_headers(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_api_hub_instance_rest_unset_required_fields(): + transport = transports.ProvisioningRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = provisioning_service.GetApiHubInstanceRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_hub_instance), "__call__" - ) as call: - call.return_value = common_fields.ApiHubInstance() - client.get_api_hub_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.get_api_hub_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.asyncio -async def test_get_api_hub_instance_field_headers_async(): - client = ProvisioningAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_api_hub_instance_rest_interceptors(null_interceptor): + transport = transports.ProvisioningRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ProvisioningRestInterceptor(), ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = provisioning_service.GetApiHubInstanceRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. + client = ProvisioningClient(transport=transport) with mock.patch.object( - type(client.transport.get_api_hub_instance), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ApiHubInstance() + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ProvisioningRestInterceptor, "post_get_api_hub_instance" + ) as post, mock.patch.object( + transports.ProvisioningRestInterceptor, "pre_get_api_hub_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = provisioning_service.GetApiHubInstanceRequest.pb( + provisioning_service.GetApiHubInstanceRequest() ) - await client.get_api_hub_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common_fields.ApiHubInstance.to_json( + common_fields.ApiHubInstance() + ) -def test_get_api_hub_instance_flattened(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + request = provisioning_service.GetApiHubInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common_fields.ApiHubInstance() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_hub_instance), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.ApiHubInstance() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. client.get_api_hub_instance( - name="name_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_get_api_hub_instance_flattened_error(): +def test_get_api_hub_instance_rest_bad_request( + transport: str = "rest", request_type=provisioning_service.GetApiHubInstanceRequest +): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_api_hub_instance( - provisioning_service.GetApiHubInstanceRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/apiHubInstances/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_api_hub_instance(request) -@pytest.mark.asyncio -async def test_get_api_hub_instance_flattened_async(): - client = ProvisioningAsyncClient( +def test_get_api_hub_instance_rest_flattened(): + client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_api_hub_instance), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = common_fields.ApiHubInstance() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common_fields.ApiHubInstance() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common_fields.ApiHubInstance() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.get_api_hub_instance( + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/apiHubInstances/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common_fields.ApiHubInstance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_api_hub_instance(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/apiHubInstances/*}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_get_api_hub_instance_flattened_error_async(): - client = ProvisioningAsyncClient( +def test_get_api_hub_instance_rest_flattened_error(transport: str = "rest"): + client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_api_hub_instance( + client.get_api_hub_instance( provisioning_service.GetApiHubInstanceRequest(), name="name_value", ) +def test_get_api_hub_instance_rest_error(): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -1928,92 +1667,45 @@ async def test_get_api_hub_instance_flattened_error_async(): dict, ], ) -def test_lookup_api_hub_instance(request_type, transport: str = "grpc"): +def test_lookup_api_hub_instance_rest(request_type): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_api_hub_instance), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = provisioning_service.LookupApiHubInstanceResponse() - response = client.lookup_api_hub_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = provisioning_service.LookupApiHubInstanceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, provisioning_service.LookupApiHubInstanceResponse) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = provisioning_service.LookupApiHubInstanceResponse() -def test_lookup_api_hub_instance_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_api_hub_instance), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = provisioning_service.LookupApiHubInstanceResponse.pb( + return_value ) - client.lookup_api_hub_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == provisioning_service.LookupApiHubInstanceRequest() - - -def test_lookup_api_hub_instance_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + json_return_value = json_format.MessageToJson(return_value) - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = provisioning_service.LookupApiHubInstanceRequest( - parent="parent_value", - ) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.lookup_api_hub_instance(request) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.lookup_api_hub_instance), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.lookup_api_hub_instance(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == provisioning_service.LookupApiHubInstanceRequest( - parent="parent_value", - ) + # Establish that the response is the type that we expect. + assert isinstance(response, provisioning_service.LookupApiHubInstanceResponse) -def test_lookup_api_hub_instance_use_cached_wrapped_rpc(): +def test_lookup_api_hub_instance_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -2034,6 +1726,7 @@ def test_lookup_api_hub_instance_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.lookup_api_hub_instance ] = mock_rpc + request = {} client.lookup_api_hub_instance(request) @@ -2047,204 +1740,223 @@ def test_lookup_api_hub_instance_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_lookup_api_hub_instance_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_api_hub_instance), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - provisioning_service.LookupApiHubInstanceResponse() - ) - response = await client.lookup_api_hub_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == provisioning_service.LookupApiHubInstanceRequest() - - -@pytest.mark.asyncio -async def test_lookup_api_hub_instance_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_lookup_api_hub_instance_rest_required_fields( + request_type=provisioning_service.LookupApiHubInstanceRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.ProvisioningRestTransport - # Ensure method has been cached - assert ( - client._client._transport.lookup_api_hub_instance - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.lookup_api_hub_instance - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.lookup_api_hub_instance(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).lookup_api_hub_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the 
underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.lookup_api_hub_instance(request) + jsonified_request["parent"] = "parent_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).lookup_api_hub_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -@pytest.mark.asyncio -async def test_lookup_api_hub_instance_async( - transport: str = "grpc_asyncio", - request_type=provisioning_service.LookupApiHubInstanceRequest, -): - client = ProvisioningAsyncClient( + client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = provisioning_service.LookupApiHubInstanceResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_api_hub_instance), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - provisioning_service.LookupApiHubInstanceResponse() - ) - response = await client.lookup_api_hub_instance(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = provisioning_service.LookupApiHubInstanceRequest() - assert args[0] == request + # Convert return value to protobuf type + return_value = provisioning_service.LookupApiHubInstanceResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance(response, provisioning_service.LookupApiHubInstanceResponse) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.lookup_api_hub_instance(request) -@pytest.mark.asyncio -async def test_lookup_api_hub_instance_async_from_dict(): - await test_lookup_api_hub_instance_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_lookup_api_hub_instance_field_headers(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_lookup_api_hub_instance_rest_unset_required_fields(): + transport = transports.ProvisioningRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = provisioning_service.LookupApiHubInstanceRequest() + unset_fields = transport.lookup_api_hub_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) - request.parent = "parent_value" - # Mock the actual call within the gRPC stub, and fake the request. +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_lookup_api_hub_instance_rest_interceptors(null_interceptor): + transport = transports.ProvisioningRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ProvisioningRestInterceptor(), + ) + client = ProvisioningClient(transport=transport) with mock.patch.object( - type(client.transport.lookup_api_hub_instance), "__call__" - ) as call: - call.return_value = provisioning_service.LookupApiHubInstanceResponse() - client.lookup_api_hub_instance(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ProvisioningRestInterceptor, "post_lookup_api_hub_instance" + ) as post, mock.patch.object( + transports.ProvisioningRestInterceptor, "pre_lookup_api_hub_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = provisioning_service.LookupApiHubInstanceRequest.pb( + provisioning_service.LookupApiHubInstanceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + provisioning_service.LookupApiHubInstanceResponse.to_json( + provisioning_service.LookupApiHubInstanceResponse() + ) + ) + request = provisioning_service.LookupApiHubInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = provisioning_service.LookupApiHubInstanceResponse() -@pytest.mark.asyncio -async def test_lookup_api_hub_instance_field_headers_async(): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client.lookup_api_hub_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = provisioning_service.LookupApiHubInstanceRequest() + pre.assert_called_once() + post.assert_called_once() - request.parent = "parent_value" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_api_hub_instance), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - provisioning_service.LookupApiHubInstanceResponse() - ) - await client.lookup_api_hub_instance(request) +def test_lookup_api_hub_instance_rest_bad_request( + transport: str = "rest", + request_type=provisioning_service.LookupApiHubInstanceRequest, +): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.lookup_api_hub_instance(request) -def test_lookup_api_hub_instance_flattened(): +def test_lookup_api_hub_instance_rest_flattened(): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.lookup_api_hub_instance), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = provisioning_service.LookupApiHubInstanceResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.lookup_api_hub_instance( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = provisioning_service.LookupApiHubInstanceResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = provisioning_service.LookupApiHubInstanceResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.lookup_api_hub_instance(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/apiHubInstances:lookup" + % client.transport._host, + args[1], + ) -def test_lookup_api_hub_instance_flattened_error(): +def test_lookup_api_hub_instance_rest_flattened_error(transport: str = "rest"): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -2256,3044 +1968,798 @@ def test_lookup_api_hub_instance_flattened_error(): ) -@pytest.mark.asyncio -async def test_lookup_api_hub_instance_flattened_async(): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_api_hub_instance), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = provisioning_service.LookupApiHubInstanceResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - provisioning_service.LookupApiHubInstanceResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.lookup_api_hub_instance( - parent="parent_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val +def test_lookup_api_hub_instance_rest_error(): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) -@pytest.mark.asyncio -async def test_lookup_api_hub_instance_flattened_error_async(): - client = ProvisioningAsyncClient( +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ProvisioningRestTransport( credentials=ga_credentials.AnonymousCredentials(), ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. with pytest.raises(ValueError): - await client.lookup_api_hub_instance( - provisioning_service.LookupApiHubInstanceRequest(), - parent="parent_value", + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - -@pytest.mark.parametrize( - "request_type", - [ - provisioning_service.CreateApiHubInstanceRequest, - dict, - ], -) -def test_create_api_hub_instance_rest(request_type): - client = ProvisioningClient( + # It is an error to provide a credentials file and a transport instance. + transport = transports.ProvisioningRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["api_hub_instance"] = { - "name": "name_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "state": 1, - "state_message": "state_message_value", - "config": {"cmek_key_name": "cmek_key_name_value"}, - "labels": {}, - "description": "description_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = provisioning_service.CreateApiHubInstanceRequest.meta.fields[ - "api_hub_instance" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["api_hub_instance"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the 
runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["api_hub_instance"][field])): - del request_init["api_hub_instance"][field][i][subfield] - else: - del request_init["api_hub_instance"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.create_api_hub_instance(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - - -def test_create_api_hub_instance_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + with pytest.raises(ValueError): client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + client_options={"credentials_file": "credentials.json"}, + transport=transport, ) - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.create_api_hub_instance - in client._transport._wrapped_methods + # It is an error to provide an api_key and a transport instance. + transport = transports.ProvisioningRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ProvisioningClient( + client_options=options, + transport=transport, ) - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ProvisioningClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() ) - client._transport._wrapped_methods[ - client._transport.create_api_hub_instance - ] = mock_rpc - - request = {} - client.create_api_hub_instance(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_api_hub_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_api_hub_instance_rest_required_fields( - request_type=provisioning_service.CreateApiHubInstanceRequest, -): - transport_class = transports.ProvisioningRestTransport - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) + # It is an error to provide scopes and a transport instance. + transport = transports.ProvisioningRestTransport( + credentials=ga_credentials.AnonymousCredentials(), ) + with pytest.raises(ValueError): + client = ProvisioningClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_api_hub_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_api_hub_instance._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("api_hub_instance_id",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - client = ProvisioningClient( +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ProvisioningRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.create_api_hub_instance(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_create_api_hub_instance_rest_unset_required_fields(): - transport = transports.ProvisioningRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.create_api_hub_instance._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("apiHubInstanceId",)) - & set( - ( - "parent", - "apiHubInstance", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_api_hub_instance_rest_interceptors(null_interceptor): - transport = transports.ProvisioningRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ProvisioningRestInterceptor(), - ) - client = ProvisioningClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.ProvisioningRestInterceptor, "post_create_api_hub_instance" - ) as post, mock.patch.object( - transports.ProvisioningRestInterceptor, "pre_create_api_hub_instance" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = provisioning_service.CreateApiHubInstanceRequest.pb( - 
provisioning_service.CreateApiHubInstanceRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() - ) - - request = provisioning_service.CreateApiHubInstanceRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.create_api_hub_instance( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_api_hub_instance_rest_bad_request( - transport: str = "rest", - request_type=provisioning_service.CreateApiHubInstanceRequest, -): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_api_hub_instance(request) - - -def test_create_api_hub_instance_rest_flattened(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - api_hub_instance=common_fields.ApiHubInstance(name="name_value"), - api_hub_instance_id="api_hub_instance_id_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.create_api_hub_instance(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/apiHubInstances" - % client.transport._host, - args[1], - ) - - -def test_create_api_hub_instance_rest_flattened_error(transport: str = "rest"): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_api_hub_instance( - provisioning_service.CreateApiHubInstanceRequest(), - parent="parent_value", - api_hub_instance=common_fields.ApiHubInstance(name="name_value"), - api_hub_instance_id="api_hub_instance_id_value", - ) - - -def test_create_api_hub_instance_rest_error(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - provisioning_service.GetApiHubInstanceRequest, - dict, - ], -) -def test_get_api_hub_instance_rest(request_type): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apiHubInstances/sample3" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.ApiHubInstance( - name="name_value", - state=common_fields.ApiHubInstance.State.INACTIVE, - state_message="state_message_value", - description="description_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.ApiHubInstance.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_api_hub_instance(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common_fields.ApiHubInstance) - assert response.name == "name_value" - assert response.state == common_fields.ApiHubInstance.State.INACTIVE - assert response.state_message == "state_message_value" - assert response.description == "description_value" - - -def test_get_api_hub_instance_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.get_api_hub_instance in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.get_api_hub_instance - ] = mock_rpc - - request = {} - client.get_api_hub_instance(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_api_hub_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_api_hub_instance_rest_required_fields( - request_type=provisioning_service.GetApiHubInstanceRequest, -): - transport_class = transports.ProvisioningRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_api_hub_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_api_hub_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = common_fields.ApiHubInstance() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = common_fields.ApiHubInstance.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_api_hub_instance(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_api_hub_instance_rest_unset_required_fields(): - transport = transports.ProvisioningRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_api_hub_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_api_hub_instance_rest_interceptors(null_interceptor): - transport = transports.ProvisioningRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ProvisioningRestInterceptor(), - ) - client = ProvisioningClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ProvisioningRestInterceptor, "post_get_api_hub_instance" - ) as post, mock.patch.object( - transports.ProvisioningRestInterceptor, "pre_get_api_hub_instance" - ) as pre: - pre.assert_not_called() - 
post.assert_not_called() - pb_message = provisioning_service.GetApiHubInstanceRequest.pb( - provisioning_service.GetApiHubInstanceRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common_fields.ApiHubInstance.to_json( - common_fields.ApiHubInstance() - ) - - request = provisioning_service.GetApiHubInstanceRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common_fields.ApiHubInstance() - - client.get_api_hub_instance( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_api_hub_instance_rest_bad_request( - transport: str = "rest", request_type=provisioning_service.GetApiHubInstanceRequest -): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/apiHubInstances/sample3" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_api_hub_instance(request) - - -def test_get_api_hub_instance_rest_flattened(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common_fields.ApiHubInstance() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/apiHubInstances/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common_fields.ApiHubInstance.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_api_hub_instance(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/apiHubInstances/*}" - % client.transport._host, - args[1], - ) - - -def test_get_api_hub_instance_rest_flattened_error(transport: str = "rest"): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_api_hub_instance( - provisioning_service.GetApiHubInstanceRequest(), - name="name_value", - ) - - -def test_get_api_hub_instance_rest_error(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - provisioning_service.LookupApiHubInstanceRequest, - dict, - ], -) -def test_lookup_api_hub_instance_rest(request_type): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = provisioning_service.LookupApiHubInstanceResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = provisioning_service.LookupApiHubInstanceResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.lookup_api_hub_instance(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, provisioning_service.LookupApiHubInstanceResponse) - - -def test_lookup_api_hub_instance_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.lookup_api_hub_instance - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.lookup_api_hub_instance - ] = mock_rpc - - request = {} - client.lookup_api_hub_instance(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.lookup_api_hub_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_lookup_api_hub_instance_rest_required_fields( - request_type=provisioning_service.LookupApiHubInstanceRequest, -): - transport_class = transports.ProvisioningRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).lookup_api_hub_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).lookup_api_hub_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = provisioning_service.LookupApiHubInstanceResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = provisioning_service.LookupApiHubInstanceResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.lookup_api_hub_instance(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_lookup_api_hub_instance_rest_unset_required_fields(): - transport = transports.ProvisioningRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.lookup_api_hub_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_lookup_api_hub_instance_rest_interceptors(null_interceptor): - transport = transports.ProvisioningRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ProvisioningRestInterceptor(), - ) - client = ProvisioningClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ProvisioningRestInterceptor, "post_lookup_api_hub_instance" - ) as post, mock.patch.object( - transports.ProvisioningRestInterceptor, "pre_lookup_api_hub_instance" - ) as pre: - 
pre.assert_not_called() - post.assert_not_called() - pb_message = provisioning_service.LookupApiHubInstanceRequest.pb( - provisioning_service.LookupApiHubInstanceRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = ( - provisioning_service.LookupApiHubInstanceResponse.to_json( - provisioning_service.LookupApiHubInstanceResponse() - ) - ) - - request = provisioning_service.LookupApiHubInstanceRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = provisioning_service.LookupApiHubInstanceResponse() - - client.lookup_api_hub_instance( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_lookup_api_hub_instance_rest_bad_request( - transport: str = "rest", - request_type=provisioning_service.LookupApiHubInstanceRequest, -): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.lookup_api_hub_instance(request) - - -def test_lookup_api_hub_instance_rest_flattened(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = provisioning_service.LookupApiHubInstanceResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = provisioning_service.LookupApiHubInstanceResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.lookup_api_hub_instance(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/apiHubInstances:lookup" - % client.transport._host, - args[1], - ) - - -def test_lookup_api_hub_instance_rest_flattened_error(transport: str = "rest"): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.lookup_api_hub_instance( - provisioning_service.LookupApiHubInstanceRequest(), - parent="parent_value", - ) - - -def test_lookup_api_hub_instance_rest_error(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.ProvisioningGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.ProvisioningGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ProvisioningClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.ProvisioningGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ProvisioningClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. 
- options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ProvisioningClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.ProvisioningGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ProvisioningClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.ProvisioningGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = ProvisioningClient(transport=transport) - assert client.transport is transport - - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.ProvisioningGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.ProvisioningGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ProvisioningGrpcTransport, - transports.ProvisioningGrpcAsyncIOTransport, - transports.ProvisioningRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "rest", - ], -) -def test_transport_kind(transport_name): - transport = ProvisioningClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.ProvisioningGrpcTransport, - ) - - -def test_provisioning_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.ProvisioningTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", - ) - - -def test_provisioning_base_transport(): - # Instantiate the base transport. - with mock.patch( - "google.cloud.apihub_v1.services.provisioning.transports.ProvisioningTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.ProvisioningTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - "create_api_hub_instance", - "get_api_hub_instance", - "lookup_api_hub_instance", - "get_location", - "list_locations", - "get_operation", - "cancel_operation", - "delete_operation", - "list_operations", - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - # Catch all for all remaining methods and properties - remainder = [ - "kind", - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_provisioning_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.apihub_v1.services.provisioning.transports.ProvisioningTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ProvisioningTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -def test_provisioning_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.cloud.apihub_v1.services.provisioning.transports.ProvisioningTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ProvisioningTransport() - adc.assert_called_once() - - -def test_provisioning_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - ProvisioningClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ProvisioningGrpcTransport, - transports.ProvisioningGrpcAsyncIOTransport, - ], -) -def test_provisioning_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ProvisioningGrpcTransport, - transports.ProvisioningGrpcAsyncIOTransport, - transports.ProvisioningRestTransport, - ], -) -def test_provisioning_transport_auth_gdch_credentials(transport_class): - host = "https://language.com" - api_audience_tests = [None, "https://language2.com"] - api_audience_expect = [host, "https://language2.com"] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, "default", autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock( - return_value=gdch_mock - ) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with(e) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.ProvisioningGrpcTransport, grpc_helpers), - (transports.ProvisioningGrpcAsyncIOTransport, grpc_helpers_async), - ], -) -def test_provisioning_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - - create_channel.assert_called_with( - "apihub.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=["1", "2"], - default_host="apihub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize( - "transport_class", - [transports.ProvisioningGrpcTransport, transports.ProvisioningGrpcAsyncIOTransport], -) -def test_provisioning_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds, - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback, - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, private_key=expected_key - ) - - -def test_provisioning_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch( - "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" - ) as mock_configure_mtls_channel: - transports.ProvisioningRestTransport( - credentials=cred, client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -def test_provisioning_rest_lro_client(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.AbstractOperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. 
- assert transport.operations_client is transport.operations_client - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_provisioning_host_no_port(transport_name): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="apihub.googleapis.com" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "apihub.googleapis.com:443" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://apihub.googleapis.com" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_provisioning_host_with_port(transport_name): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="apihub.googleapis.com:8000" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "apihub.googleapis.com:8000" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://apihub.googleapis.com:8000" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "rest", - ], -) -def test_provisioning_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = ProvisioningClient( - credentials=creds1, - transport=transport_name, - ) - client2 = ProvisioningClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.create_api_hub_instance._session - session2 = client2.transport.create_api_hub_instance._session - assert session1 != session2 - session1 = client1.transport.get_api_hub_instance._session - session2 = client2.transport.get_api_hub_instance._session - assert session1 != session2 - session1 = client1.transport.lookup_api_hub_instance._session - session2 = client2.transport.lookup_api_hub_instance._session - assert session1 != 
session2 - - -def test_provisioning_grpc_transport_channel(): - channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.ProvisioningGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_provisioning_grpc_asyncio_transport_channel(): - channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.ProvisioningGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize( - "transport_class", - [transports.ProvisioningGrpcTransport, transports.ProvisioningGrpcAsyncIOTransport], -) -def test_provisioning_transport_channel_mtls_with_client_cert_source(transport_class): - with mock.patch( - "grpc.ssl_channel_credentials", autospec=True - ) as grpc_ssl_channel_cred: - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize( - "transport_class", - [transports.ProvisioningGrpcTransport, transports.ProvisioningGrpcAsyncIOTransport], -) -def test_provisioning_transport_channel_mtls_with_adc(transport_class): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_provisioning_grpc_lro_client(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_provisioning_grpc_lro_async_client(): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - transport = client.transport - - # Ensure that we have a api-core operations client. 
- assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_api_hub_instance_path(): - project = "squid" - location = "clam" - api_hub_instance = "whelk" - expected = "projects/{project}/locations/{location}/apiHubInstances/{api_hub_instance}".format( - project=project, - location=location, - api_hub_instance=api_hub_instance, - ) - actual = ProvisioningClient.api_hub_instance_path( - project, location, api_hub_instance - ) - assert expected == actual - - -def test_parse_api_hub_instance_path(): - expected = { - "project": "octopus", - "location": "oyster", - "api_hub_instance": "nudibranch", - } - path = ProvisioningClient.api_hub_instance_path(**expected) - - # Check that the path construction is reversible. - actual = ProvisioningClient.parse_api_hub_instance_path(path) - assert expected == actual - - -def test_common_billing_account_path(): - billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format( - billing_account=billing_account, - ) - actual = ProvisioningClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "mussel", - } - path = ProvisioningClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. 
- actual = ProvisioningClient.parse_common_billing_account_path(path) - assert expected == actual - - -def test_common_folder_path(): - folder = "winkle" - expected = "folders/{folder}".format( - folder=folder, - ) - actual = ProvisioningClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nautilus", - } - path = ProvisioningClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = ProvisioningClient.parse_common_folder_path(path) - assert expected == actual - - -def test_common_organization_path(): - organization = "scallop" - expected = "organizations/{organization}".format( - organization=organization, - ) - actual = ProvisioningClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "abalone", - } - path = ProvisioningClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = ProvisioningClient.parse_common_organization_path(path) - assert expected == actual - - -def test_common_project_path(): - project = "squid" - expected = "projects/{project}".format( - project=project, - ) - actual = ProvisioningClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "clam", - } - path = ProvisioningClient.common_project_path(**expected) - - # Check that the path construction is reversible. 
- actual = ProvisioningClient.parse_common_project_path(path) - assert expected == actual - - -def test_common_location_path(): - project = "whelk" - location = "octopus" - expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, - ) - actual = ProvisioningClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - } - path = ProvisioningClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = ProvisioningClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object( - transports.ProvisioningTransport, "_prep_wrapped_messages" - ) as prep: - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object( - transports.ProvisioningTransport, "_prep_wrapped_messages" - ) as prep: - transport_class = ProvisioningClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_get_location_rest_bad_request( - transport: str = "rest", request_type=locations_pb2.GetLocationRequest -): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = 
request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_location(request) - - -@pytest.mark.parametrize( - "request_type", - [ - locations_pb2.GetLocationRequest, - dict, - ], -) -def test_get_location_rest(request_type): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.Location() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_location(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -def test_list_locations_rest_bad_request( - transport: str = "rest", request_type=locations_pb2.ListLocationsRequest -): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({"name": "projects/sample1"}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_locations(request) - - -@pytest.mark.parametrize( - "request_type", - [ - locations_pb2.ListLocationsRequest, - dict, - ], -) -def test_list_locations_rest(request_type): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.ListLocationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_locations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - - -def test_cancel_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.CancelOperationRequest -): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.CancelOperationRequest, - dict, - ], -) -def test_cancel_operation_rest(request_type): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest -): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.DeleteOperationRequest, - dict, - ], -) -def test_delete_operation_rest(request_type): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_get_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.GetOperationRequest -): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.GetOperationRequest, - dict, - ], -) -def test_get_operation_rest(request_type): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_list_operations_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.ListOperationsRequest -): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request ) + client = ProvisioningClient(transport=transport) + assert client.transport is transport - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_operations(request) + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ProvisioningRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() @pytest.mark.parametrize( - "request_type", + "transport_name", [ - operations_pb2.ListOperationsRequest, - dict, + "rest", ], ) -def test_list_operations_rest(request_type): - client = ProvisioningClient( +def test_transport_kind(transport_name): + transport = ProvisioningClient.get_transport_class(transport_name)( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + assert transport.kind == transport_name - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_operations(request) +def test_provisioning_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ProvisioningTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_provisioning_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.apihub_v1.services.provisioning.transports.ProvisioningTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ProvisioningTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) -def test_delete_operation(transport: str = "grpc"): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "create_api_hub_instance", + "get_api_hub_instance", + "lookup_api_hub_instance", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - + with pytest.raises(NotImplementedError): + transport.close() -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the response is the type that we expect. 
- assert response is None +def test_provisioning_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.apihub_v1.services.provisioning.transports.ProvisioningTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ProvisioningTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) -def test_delete_operation_field_headers(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - ) +def test_provisioning_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.apihub_v1.services.provisioning.transports.ProvisioningTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ProvisioningTransport() + adc.assert_called_once() - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None +def test_provisioning_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ProvisioningClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] +def test_provisioning_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.ProvisioningRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = ProvisioningAsyncClient( +def test_provisioning_rest_lro_client(): + client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + transport = client.transport - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client -def test_delete_operation_from_dict(): +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_provisioning_host_no_port(transport_name): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com" + ), + transport=transport_name, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + assert client.transport._host == ( + "apihub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com" ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -def test_cancel_operation(transport: str = "grpc"): +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_provisioning_host_with_port(transport_name): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com:8000" + ), + transport=transport_name, ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + assert client.transport._host == ( + "apihub.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com:8000" ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_provisioning_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ProvisioningClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ProvisioningClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_api_hub_instance._session + session2 = client2.transport.create_api_hub_instance._session + assert session1 != session2 + session1 = client1.transport.get_api_hub_instance._session + session2 = client2.transport.get_api_hub_instance._session + assert session1 != session2 + session1 = client1.transport.lookup_api_hub_instance._session + session2 = client2.transport.lookup_api_hub_instance._session + assert session1 != session2 -def test_cancel_operation_field_headers(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_api_hub_instance_path(): + project = "squid" + location = "clam" + api_hub_instance = "whelk" + expected = "projects/{project}/locations/{location}/apiHubInstances/{api_hub_instance}".format( + project=project, + location=location, + api_hub_instance=api_hub_instance, ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + actual = ProvisioningClient.api_hub_instance_path( + project, location, api_hub_instance ) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_api_hub_instance_path(): + expected = { + "project": "octopus", + "location": "oyster", + "api_hub_instance": "nudibranch", + } + path = ProvisioningClient.api_hub_instance_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. 
+ actual = ProvisioningClient.parse_api_hub_instance_path(path) + assert expected == actual -def test_cancel_operation_from_dict(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() + actual = ProvisioningClient.common_billing_account_path(billing_account) + assert expected == actual -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = ProvisioningClient.common_billing_account_path(**expected) + # Check that the path construction is reversible. 
+ actual = ProvisioningClient.parse_common_billing_account_path(path) + assert expected == actual -def test_get_operation(transport: str = "grpc"): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format( + folder=folder, ) + actual = ProvisioningClient.common_folder_path(folder) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = ProvisioningClient.common_folder_path(**expected) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) + # Check that the path construction is reversible. 
+ actual = ProvisioningClient.parse_common_folder_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format( + organization=organization, ) + actual = ProvisioningClient.common_organization_path(organization) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = ProvisioningClient.common_organization_path(**expected) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) + # Check that the path construction is reversible. 
+ actual = ProvisioningClient.parse_common_organization_path(path) + assert expected == actual -def test_get_operation_field_headers(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, ) + actual = ProvisioningClient.common_project_path(project) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = ProvisioningClient.common_project_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = ProvisioningClient.parse_common_project_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, ) + actual = ProvisioningClient.common_location_path(project, location) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. 
Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = ProvisioningClient.common_location_path(**expected) + # Check that the path construction is reversible. + actual = ProvisioningClient.parse_common_location_path(path) + assert expected == actual -def test_get_operation_from_dict(): - client = ProvisioningClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() + with mock.patch.object( + transports.ProvisioningTransport, "_prep_wrapped_messages" + ) as prep: + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) - response = await client.get_operation( - request={ - "name": "locations", - } + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ProvisioningTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ProvisioningClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) - call.assert_called() + prep.assert_called_once_with(client_info) -def test_list_operations(transport: str = "grpc"): +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.ListOperationsResponse) - - -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) -def test_list_operations_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.get_location(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) -def test_list_operations_from_dict(): +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) -def test_list_locations(transport: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, locations_pb2.ListLocationsResponse) -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc_asyncio"): - client = ProvisioningAsyncClient( +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) -def test_list_locations_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() - - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.cancel_operation(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Establish that the response is the type that we expect. 
+ assert response is None -def test_list_locations_from_dict(): +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) -def test_get_location(transport: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.delete_operation(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, locations_pb2.Location) + assert response is None -@pytest.mark.asyncio -async def test_get_location_async(transport: str = "grpc_asyncio"): - client = ProvisioningAsyncClient( +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) -def test_get_location_field_headers(): - client = ProvisioningClient(credentials=ga_credentials.AnonymousCredentials()) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. 
Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = locations_pb2.Location() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations/abc", - ) in kw["metadata"] + response = client.get_operation(request) + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_location_field_headers_async(): - client = ProvisioningAsyncClient(credentials=ga_credentials.AnonymousCredentials()) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. 
Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = ProvisioningClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations/abc", - ) in kw["metadata"] + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) -def test_get_location_from_dict(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): client = ProvisioningClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = locations_pb2.Location() - - response = client.get_location( - request={ - "name": "locations/abc", - } - ) - call.assert_called() + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) -@pytest.mark.asyncio -async def test_get_location_from_dict_async(): - client = ProvisioningAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location( - request={ - "name": "locations", - } - ) - call.assert_called() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) def test_transport_close(): transports = { "rest": "_session", - "grpc": "_grpc_channel", } for transport, close_name in transports.items(): @@ -5311,7 +2777,6 @@ def test_transport_close(): def test_client_ctx(): transports = [ "rest", - "grpc", ] for transport in transports: client = ProvisioningClient( @@ -5328,8 +2793,7 @@ def test_client_ctx(): @pytest.mark.parametrize( "client_class,transport_class", [ - (ProvisioningClient, transports.ProvisioningGrpcTransport), - (ProvisioningAsyncClient, transports.ProvisioningGrpcAsyncIOTransport), + (ProvisioningClient, transports.ProvisioningRestTransport), ], ) def test_api_key_credentials(client_class, transport_class): diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_runtime_project_attachment_service.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_runtime_project_attachment_service.py index f56c0e63d5f7..836aa804eb51 100644 --- a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_runtime_project_attachment_service.py +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_runtime_project_attachment_service.py @@ -47,7 +47,6 @@ from requests.sessions import Session from google.cloud.apihub_v1.services.runtime_project_attachment_service import ( - RuntimeProjectAttachmentServiceAsyncClient, RuntimeProjectAttachmentServiceClient, pagers, transports, @@ -234,11 +233,6 @@ def test__get_client_cert_source(): "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(RuntimeProjectAttachmentServiceClient), ) -@mock.patch.object( - RuntimeProjectAttachmentServiceAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(RuntimeProjectAttachmentServiceAsyncClient), -) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() @@ -337,11 +331,6 @@ def test__get_universe_domain(): @pytest.mark.parametrize( 
"client_class,transport_class,transport_name", [ - ( - RuntimeProjectAttachmentServiceClient, - transports.RuntimeProjectAttachmentServiceGrpcTransport, - "grpc", - ), ( RuntimeProjectAttachmentServiceClient, transports.RuntimeProjectAttachmentServiceRestTransport, @@ -425,8 +414,6 @@ def test__validate_universe_domain(client_class, transport_class, transport_name @pytest.mark.parametrize( "client_class,transport_name", [ - (RuntimeProjectAttachmentServiceClient, "grpc"), - (RuntimeProjectAttachmentServiceAsyncClient, "grpc_asyncio"), (RuntimeProjectAttachmentServiceClient, "rest"), ], ) @@ -453,11 +440,6 @@ def test_runtime_project_attachment_service_client_from_service_account_info( @pytest.mark.parametrize( "transport_class,transport_name", [ - (transports.RuntimeProjectAttachmentServiceGrpcTransport, "grpc"), - ( - transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport, - "grpc_asyncio", - ), (transports.RuntimeProjectAttachmentServiceRestTransport, "rest"), ], ) @@ -482,8 +464,6 @@ def test_runtime_project_attachment_service_client_service_account_always_use_jw @pytest.mark.parametrize( "client_class,transport_name", [ - (RuntimeProjectAttachmentServiceClient, "grpc"), - (RuntimeProjectAttachmentServiceAsyncClient, "grpc_asyncio"), (RuntimeProjectAttachmentServiceClient, "rest"), ], ) @@ -517,28 +497,17 @@ def test_runtime_project_attachment_service_client_from_service_account_file( def test_runtime_project_attachment_service_client_get_transport_class(): transport = RuntimeProjectAttachmentServiceClient.get_transport_class() available_transports = [ - transports.RuntimeProjectAttachmentServiceGrpcTransport, transports.RuntimeProjectAttachmentServiceRestTransport, ] assert transport in available_transports - transport = RuntimeProjectAttachmentServiceClient.get_transport_class("grpc") - assert transport == transports.RuntimeProjectAttachmentServiceGrpcTransport + transport = RuntimeProjectAttachmentServiceClient.get_transport_class("rest") + assert 
transport == transports.RuntimeProjectAttachmentServiceRestTransport @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - ( - RuntimeProjectAttachmentServiceClient, - transports.RuntimeProjectAttachmentServiceGrpcTransport, - "grpc", - ), - ( - RuntimeProjectAttachmentServiceAsyncClient, - transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport, - "grpc_asyncio", - ), ( RuntimeProjectAttachmentServiceClient, transports.RuntimeProjectAttachmentServiceRestTransport, @@ -551,11 +520,6 @@ def test_runtime_project_attachment_service_client_get_transport_class(): "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(RuntimeProjectAttachmentServiceClient), ) -@mock.patch.object( - RuntimeProjectAttachmentServiceAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(RuntimeProjectAttachmentServiceAsyncClient), -) def test_runtime_project_attachment_service_client_client_options( client_class, transport_class, transport_name ): @@ -693,30 +657,6 @@ def test_runtime_project_attachment_service_client_client_options( @pytest.mark.parametrize( "client_class,transport_class,transport_name,use_client_cert_env", [ - ( - RuntimeProjectAttachmentServiceClient, - transports.RuntimeProjectAttachmentServiceGrpcTransport, - "grpc", - "true", - ), - ( - RuntimeProjectAttachmentServiceAsyncClient, - transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport, - "grpc_asyncio", - "true", - ), - ( - RuntimeProjectAttachmentServiceClient, - transports.RuntimeProjectAttachmentServiceGrpcTransport, - "grpc", - "false", - ), - ( - RuntimeProjectAttachmentServiceAsyncClient, - transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport, - "grpc_asyncio", - "false", - ), ( RuntimeProjectAttachmentServiceClient, transports.RuntimeProjectAttachmentServiceRestTransport, @@ -736,11 +676,6 @@ def test_runtime_project_attachment_service_client_client_options( "_DEFAULT_ENDPOINT_TEMPLATE", 
modify_default_endpoint_template(RuntimeProjectAttachmentServiceClient), ) -@mock.patch.object( - RuntimeProjectAttachmentServiceAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(RuntimeProjectAttachmentServiceAsyncClient), -) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_runtime_project_attachment_service_client_mtls_env_auto( client_class, transport_class, transport_name, use_client_cert_env @@ -844,20 +779,12 @@ def test_runtime_project_attachment_service_client_mtls_env_auto( ) -@pytest.mark.parametrize( - "client_class", - [RuntimeProjectAttachmentServiceClient, RuntimeProjectAttachmentServiceAsyncClient], -) +@pytest.mark.parametrize("client_class", [RuntimeProjectAttachmentServiceClient]) @mock.patch.object( RuntimeProjectAttachmentServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(RuntimeProjectAttachmentServiceClient), ) -@mock.patch.object( - RuntimeProjectAttachmentServiceAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(RuntimeProjectAttachmentServiceAsyncClient), -) def test_runtime_project_attachment_service_client_get_mtls_endpoint_and_cert_source( client_class, ): @@ -951,20 +878,12 @@ def test_runtime_project_attachment_service_client_get_mtls_endpoint_and_cert_so ) -@pytest.mark.parametrize( - "client_class", - [RuntimeProjectAttachmentServiceClient, RuntimeProjectAttachmentServiceAsyncClient], -) +@pytest.mark.parametrize("client_class", [RuntimeProjectAttachmentServiceClient]) @mock.patch.object( RuntimeProjectAttachmentServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(RuntimeProjectAttachmentServiceClient), ) -@mock.patch.object( - RuntimeProjectAttachmentServiceAsyncClient, - "_DEFAULT_ENDPOINT_TEMPLATE", - modify_default_endpoint_template(RuntimeProjectAttachmentServiceAsyncClient), -) def test_runtime_project_attachment_service_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback 
api_override = "foo.com" @@ -1045,16 +964,6 @@ def test_runtime_project_attachment_service_client_client_api_endpoint(client_cl @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ - ( - RuntimeProjectAttachmentServiceClient, - transports.RuntimeProjectAttachmentServiceGrpcTransport, - "grpc", - ), - ( - RuntimeProjectAttachmentServiceAsyncClient, - transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport, - "grpc_asyncio", - ), ( RuntimeProjectAttachmentServiceClient, transports.RuntimeProjectAttachmentServiceRestTransport, @@ -1090,18 +999,6 @@ def test_runtime_project_attachment_service_client_client_options_scopes( @pytest.mark.parametrize( "client_class,transport_class,transport_name,grpc_helpers", [ - ( - RuntimeProjectAttachmentServiceClient, - transports.RuntimeProjectAttachmentServiceGrpcTransport, - "grpc", - grpc_helpers, - ), - ( - RuntimeProjectAttachmentServiceAsyncClient, - transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), ( RuntimeProjectAttachmentServiceClient, transports.RuntimeProjectAttachmentServiceRestTransport, @@ -1134,96 +1031,6 @@ def test_runtime_project_attachment_service_client_client_options_credentials_fi ) -def test_runtime_project_attachment_service_client_client_options_from_dict(): - with mock.patch( - "google.cloud.apihub_v1.services.runtime_project_attachment_service.transports.RuntimeProjectAttachmentServiceGrpcTransport.__init__" - ) as grpc_transport: - grpc_transport.return_value = None - client = RuntimeProjectAttachmentServiceClient( - client_options={"api_endpoint": "squid.clam.whelk"} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize( - 
"client_class,transport_class,transport_name,grpc_helpers", - [ - ( - RuntimeProjectAttachmentServiceClient, - transports.RuntimeProjectAttachmentServiceGrpcTransport, - "grpc", - grpc_helpers, - ), - ( - RuntimeProjectAttachmentServiceAsyncClient, - transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), - ], -) -def test_runtime_project_attachment_service_client_create_channel_credentials_file( - client_class, transport_class, transport_name, grpc_helpers -): - # Check the case credentials file is provided. - options = client_options.ClientOptions(credentials_file="credentials.json") - - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format( - UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE - ), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "apihub.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=None, - default_host="apihub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - @pytest.mark.parametrize( "request_type", [ @@ -1231,34 +1038,114 @@ def test_runtime_project_attachment_service_client_create_channel_credentials_fi dict, ], ) -def test_create_runtime_project_attachment(request_type, transport: str = "grpc"): +def test_create_runtime_project_attachment_rest(request_type): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["runtime_project_attachment"] = { + "name": "name_value", + "runtime_project": "runtime_project_value", + "create_time": {"seconds": 751, "nanos": 543}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = runtime_project_attachment_service.RuntimeProjectAttachment( + # Determine if the message type is proto-plus or protobuf + test_field = runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest.meta.fields[ + "runtime_project_attachment" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "runtime_project_attachment" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if 
isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["runtime_project_attachment"][field]) + ): + del request_init["runtime_project_attachment"][field][i][subfield] + else: + del request_init["runtime_project_attachment"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = runtime_project_attachment_service.RuntimeProjectAttachment( name="name_value", runtime_project="runtime_project_value", ) - response = client.create_runtime_project_attachment(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = ( - runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = runtime_project_attachment_service.RuntimeProjectAttachment.pb( + return_value ) - assert args[0] == request + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_runtime_project_attachment(request) # Establish that the response is the type that we expect. assert isinstance( @@ -1268,71 +1155,13 @@ def test_create_runtime_project_attachment(request_type, transport: str = "grpc" assert response.runtime_project == "runtime_project_value" -def test_create_runtime_project_attachment_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_runtime_project_attachment), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_runtime_project_attachment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest() - ) - - -def test_create_runtime_project_attachment_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest( - parent="parent_value", - runtime_project_attachment_id="runtime_project_attachment_id_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_runtime_project_attachment), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_runtime_project_attachment(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[ - 0 - ] == runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest( - parent="parent_value", - runtime_project_attachment_id="runtime_project_attachment_id_value", - ) - - -def test_create_runtime_project_attachment_use_cached_wrapped_rpc(): +def test_create_runtime_project_attachment_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1353,6 +1182,7 @@ def test_create_runtime_project_attachment_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.create_runtime_project_attachment ] = mock_rpc + request = {} client.create_runtime_project_attachment(request) @@ -1366,305 +1196,278 @@ def test_create_runtime_project_attachment_use_cached_wrapped_rpc(): assert 
mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_runtime_project_attachment_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - runtime_project_attachment_service.RuntimeProjectAttachment( - name="name_value", - runtime_project="runtime_project_value", - ) - ) - response = await client.create_runtime_project_attachment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest() - ) - - -@pytest.mark.asyncio -async def test_create_runtime_project_attachment_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_create_runtime_project_attachment_rest_required_fields( + request_type=runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.RuntimeProjectAttachmentServiceRestTransport - # Ensure method has been cached - assert ( - client._client._transport.create_runtime_project_attachment - in 
client._client._transport._wrapped_methods - ) + request_init = {} + request_init["parent"] = "" + request_init["runtime_project_attachment_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.create_runtime_project_attachment - ] = mock_rpc + # verify fields with default values are dropped + assert "runtimeProjectAttachmentId" not in jsonified_request - request = {} - await client.create_runtime_project_attachment(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_runtime_project_attachment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present + assert "runtimeProjectAttachmentId" in jsonified_request + assert ( + jsonified_request["runtimeProjectAttachmentId"] + == request_init["runtime_project_attachment_id"] + ) - await client.create_runtime_project_attachment(request) + jsonified_request["parent"] = "parent_value" + jsonified_request[ + "runtimeProjectAttachmentId" + ] = "runtime_project_attachment_id_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_runtime_project_attachment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("runtime_project_attachment_id",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "runtimeProjectAttachmentId" in jsonified_request + assert ( + jsonified_request["runtimeProjectAttachmentId"] + == "runtime_project_attachment_id_value" + ) -@pytest.mark.asyncio -async def test_create_runtime_project_attachment_async( - transport: str = "grpc_asyncio", - request_type=runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest, -): - client = RuntimeProjectAttachmentServiceAsyncClient( + client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = runtime_project_attachment_service.RuntimeProjectAttachment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - runtime_project_attachment_service.RuntimeProjectAttachment( - name="name_value", - runtime_project="runtime_project_value", - ) - ) - response = await client.create_runtime_project_attachment(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = ( - runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest() - ) - assert args[0] == request + # Convert return value to protobuf type + return_value = ( + runtime_project_attachment_service.RuntimeProjectAttachment.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance( - response, runtime_project_attachment_service.RuntimeProjectAttachment - ) - assert response.name == "name_value" - assert response.runtime_project == "runtime_project_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_runtime_project_attachment(request) -@pytest.mark.asyncio -async def test_create_runtime_project_attachment_async_from_dict(): - await test_create_runtime_project_attachment_async(request_type=dict) + expected_params = [ + ( + "runtimeProjectAttachmentId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_create_runtime_project_attachment_field_headers(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_create_runtime_project_attachment_rest_unset_required_fields(): + transport = transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_runtime_project_attachment), "__call__" - ) as call: - call.return_value = ( - runtime_project_attachment_service.RuntimeProjectAttachment() + unset_fields = ( + transport.create_runtime_project_attachment._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set(("runtimeProjectAttachmentId",)) + & set( + ( + "parent", + "runtimeProjectAttachmentId", + "runtimeProjectAttachment", + ) ) - client.create_runtime_project_attachment(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + ) -@pytest.mark.asyncio -async def test_create_runtime_project_attachment_field_headers_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_runtime_project_attachment_rest_interceptors(null_interceptor): + transport = transports.RuntimeProjectAttachmentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RuntimeProjectAttachmentServiceRestInterceptor(), ) + client = RuntimeProjectAttachmentServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RuntimeProjectAttachmentServiceRestInterceptor, + "post_create_runtime_project_attachment", + ) as post, mock.patch.object( + transports.RuntimeProjectAttachmentServiceRestInterceptor, + "pre_create_runtime_project_attachment", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest.pb( + runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest() - - request.parent = "parent_value" + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + runtime_project_attachment_service.RuntimeProjectAttachment.to_json( + runtime_project_attachment_service.RuntimeProjectAttachment() + ) + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_runtime_project_attachment), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + request = ( + runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest() + ) + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ( runtime_project_attachment_service.RuntimeProjectAttachment() ) - await client.create_runtime_project_attachment(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + client.create_runtime_project_attachment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + pre.assert_called_once() + post.assert_called_once() -def test_create_runtime_project_attachment_flattened(): +def test_create_runtime_project_attachment_rest_bad_request( + transport: str = "rest", + request_type=runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest, +): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = ( - runtime_project_attachment_service.RuntimeProjectAttachment() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_runtime_project_attachment( - parent="parent_value", - runtime_project_attachment=runtime_project_attachment_service.RuntimeProjectAttachment( - name="name_value" - ), - runtime_project_attachment_id="runtime_project_attachment_id_value", - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].runtime_project_attachment - mock_val = runtime_project_attachment_service.RuntimeProjectAttachment( - name="name_value" - ) - assert arg == mock_val - arg = args[0].runtime_project_attachment_id - mock_val = "runtime_project_attachment_id_value" - assert arg == mock_val - - -def test_create_runtime_project_attachment_flattened_error(): + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_runtime_project_attachment(request) + + +def test_create_runtime_project_attachment_rest_flattened(): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_runtime_project_attachment( - runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest(), + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = runtime_project_attachment_service.RuntimeProjectAttachment() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", runtime_project_attachment=runtime_project_attachment_service.RuntimeProjectAttachment( name="name_value" ), runtime_project_attachment_id="runtime_project_attachment_id_value", ) + mock_args.update(sample_request) - -@pytest.mark.asyncio -async def test_create_runtime_project_attachment_flattened_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = ( - runtime_project_attachment_service.RuntimeProjectAttachment() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = runtime_project_attachment_service.RuntimeProjectAttachment.pb( + return_value ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - runtime_project_attachment_service.RuntimeProjectAttachment() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_runtime_project_attachment( - parent="parent_value", - runtime_project_attachment=runtime_project_attachment_service.RuntimeProjectAttachment( - name="name_value" - ), - runtime_project_attachment_id="runtime_project_attachment_id_value", - ) + client.create_runtime_project_attachment(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].runtime_project_attachment - mock_val = runtime_project_attachment_service.RuntimeProjectAttachment( - name="name_value" - ) - assert arg == mock_val - arg = args[0].runtime_project_attachment_id - mock_val = "runtime_project_attachment_id_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_create_runtime_project_attachment_flattened_error_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/runtimeProjectAttachments" + % client.transport._host, + args[1], + ) + + +def test_create_runtime_project_attachment_rest_flattened_error( + transport: str = "rest", +): + client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.create_runtime_project_attachment( + client.create_runtime_project_attachment( runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest(), parent="parent_value", runtime_project_attachment=runtime_project_attachment_service.RuntimeProjectAttachment( @@ -1674,6 +1477,12 @@ async def test_create_runtime_project_attachment_flattened_error_async(): ) +def test_create_runtime_project_attachment_rest_error(): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -1681,34 +1490,38 @@ async def test_create_runtime_project_attachment_flattened_error_async(): dict, ], ) -def test_get_runtime_project_attachment(request_type, transport: str = "grpc"): +def test_get_runtime_project_attachment_rest(request_type): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/runtimeProjectAttachments/sample3" + } + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = runtime_project_attachment_service.RuntimeProjectAttachment( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = runtime_project_attachment_service.RuntimeProjectAttachment( name="name_value", runtime_project="runtime_project_value", ) - response = client.get_runtime_project_attachment(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = ( - runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = runtime_project_attachment_service.RuntimeProjectAttachment.pb( + return_value ) - assert args[0] == request + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_runtime_project_attachment(request) # Establish that the response is the type that we expect. assert isinstance( @@ -1718,69 +1531,13 @@ def test_get_runtime_project_attachment(request_type, transport: str = "grpc"): assert response.runtime_project == "runtime_project_value" -def test_get_runtime_project_attachment_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_runtime_project_attachment), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.get_runtime_project_attachment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest() - ) - - -def test_get_runtime_project_attachment_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_runtime_project_attachment), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.get_runtime_project_attachment(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[ - 0 - ] == runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest( - name="name_value", - ) - - -def test_get_runtime_project_attachment_use_cached_wrapped_rpc(): +def test_get_runtime_project_attachment_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -1801,6 +1558,7 @@ def test_get_runtime_project_attachment_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.get_runtime_project_attachment ] = mock_rpc + request = {} client.get_runtime_project_attachment(request) @@ -1814,282 +1572,256 @@ def test_get_runtime_project_attachment_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_runtime_project_attachment_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - runtime_project_attachment_service.RuntimeProjectAttachment( - name="name_value", - runtime_project="runtime_project_value", - ) - ) - response = await client.get_runtime_project_attachment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest() - ) - - -@pytest.mark.asyncio -async def test_get_runtime_project_attachment_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_get_runtime_project_attachment_rest_required_fields( + request_type=runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.RuntimeProjectAttachmentServiceRestTransport - # Ensure method has been cached - assert ( - client._client._transport.get_runtime_project_attachment - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.get_runtime_project_attachment - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.get_runtime_project_attachment(request) + unset_fields = 
transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_runtime_project_attachment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.get_runtime_project_attachment(request) + jsonified_request["name"] = "name_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_runtime_project_attachment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -@pytest.mark.asyncio -async def test_get_runtime_project_attachment_async( - transport: str = "grpc_asyncio", - request_type=runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest, -): - client = RuntimeProjectAttachmentServiceAsyncClient( + client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Designate an appropriate value for the returned response. + return_value = runtime_project_attachment_service.RuntimeProjectAttachment() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - runtime_project_attachment_service.RuntimeProjectAttachment( - name="name_value", - runtime_project="runtime_project_value", - ) - ) - response = await client.get_runtime_project_attachment(request) + response_value = Response() + response_value.status_code = 200 - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = ( - runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest() - ) - assert args[0] == request + # Convert return value to protobuf type + return_value = ( + runtime_project_attachment_service.RuntimeProjectAttachment.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the response is the type that we expect. 
- assert isinstance( - response, runtime_project_attachment_service.RuntimeProjectAttachment - ) - assert response.name == "name_value" - assert response.runtime_project == "runtime_project_value" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_runtime_project_attachment(request) -@pytest.mark.asyncio -async def test_get_runtime_project_attachment_async_from_dict(): - await test_get_runtime_project_attachment_async(request_type=dict) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_get_runtime_project_attachment_field_headers(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_runtime_project_attachment_rest_unset_required_fields(): + transport = transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_runtime_project_attachment), "__call__" - ) as call: - call.return_value = ( - runtime_project_attachment_service.RuntimeProjectAttachment() - ) - client.get_runtime_project_attachment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = transport.get_runtime_project_attachment._get_unset_required_fields( + {} + ) + assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.asyncio -async def test_get_runtime_project_attachment_field_headers_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_runtime_project_attachment_rest_interceptors(null_interceptor): + transport = transports.RuntimeProjectAttachmentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RuntimeProjectAttachmentServiceRestInterceptor(), ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
+ client = RuntimeProjectAttachmentServiceClient(transport=transport) with mock.patch.object( - type(client.transport.get_runtime_project_attachment), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - runtime_project_attachment_service.RuntimeProjectAttachment() + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RuntimeProjectAttachmentServiceRestInterceptor, + "post_get_runtime_project_attachment", + ) as post, mock.patch.object( + transports.RuntimeProjectAttachmentServiceRestInterceptor, + "pre_get_runtime_project_attachment", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = ( + runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest.pb( + runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest() + ) ) - await client.get_runtime_project_attachment(request) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] - - -def test_get_runtime_project_attachment_flattened(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + runtime_project_attachment_service.RuntimeProjectAttachment.to_json( + runtime_project_attachment_service.RuntimeProjectAttachment() + ) + ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = ( + request = ( + runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest() + ) + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ( runtime_project_attachment_service.RuntimeProjectAttachment() ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. + client.get_runtime_project_attachment( - name="name_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_get_runtime_project_attachment_flattened_error(): +def test_get_runtime_project_attachment_rest_bad_request( + transport: str = "rest", + request_type=runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest, +): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_runtime_project_attachment( - runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/runtimeProjectAttachments/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_runtime_project_attachment(request) -@pytest.mark.asyncio -async def test_get_runtime_project_attachment_flattened_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( +def test_get_runtime_project_attachment_rest_flattened(): + client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = ( - runtime_project_attachment_service.RuntimeProjectAttachment() - ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = runtime_project_attachment_service.RuntimeProjectAttachment() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - runtime_project_attachment_service.RuntimeProjectAttachment() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.get_runtime_project_attachment( + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/runtimeProjectAttachments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = runtime_project_attachment_service.RuntimeProjectAttachment.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_runtime_project_attachment(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/runtimeProjectAttachments/*}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_get_runtime_project_attachment_flattened_error_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( +def test_get_runtime_project_attachment_rest_flattened_error(transport: str = "rest"): + client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_runtime_project_attachment( + client.get_runtime_project_attachment( runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest(), name="name_value", ) +def test_get_runtime_project_attachment_rest_error(): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -2097,110 +1829,52 @@ async def test_get_runtime_project_attachment_flattened_error_async(): dict, ], ) -def test_list_runtime_project_attachments(request_type, transport: str = "grpc"): +def test_list_runtime_project_attachments_rest(request_type): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_runtime_project_attachments), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = ( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = ( runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( next_page_token="next_page_token_value", ) ) - response = client.list_runtime_project_attachments(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = ( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = ( + runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse.pb( + return_value + ) ) - assert args[0] == request + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_runtime_project_attachments(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListRuntimeProjectAttachmentsPager) assert response.next_page_token == "next_page_token_value" -def test_list_runtime_project_attachments_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_runtime_project_attachments), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_runtime_project_attachments() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest() - ) - - -def test_list_runtime_project_attachments_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_runtime_project_attachments), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_runtime_project_attachments(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[ - 0 - ] == runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", - ) - - -def test_list_runtime_project_attachments_use_cached_wrapped_rpc(): +def test_list_runtime_project_attachments_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -2221,6 +1895,7 @@ def test_list_runtime_project_attachments_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.list_runtime_project_attachments ] = mock_rpc + request = {} client.list_runtime_project_attachments(request) @@ -2234,289 +1909,277 @@ def 
test_list_runtime_project_attachments_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_runtime_project_attachments_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_runtime_project_attachments), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_runtime_project_attachments() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest() - ) - - -@pytest.mark.asyncio -async def test_list_runtime_project_attachments_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", +def test_list_runtime_project_attachments_rest_required_fields( + request_type=runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest, ): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + transport_class = transports.RuntimeProjectAttachmentServiceRestTransport - # Ensure method has been cached - assert ( - 
client._client._transport.list_runtime_project_attachments - in client._client._transport._wrapped_methods - ) + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.list_runtime_project_attachments - ] = mock_rpc + # verify fields with default values are dropped - request = {} - await client.list_runtime_project_attachments(request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_runtime_project_attachments._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # verify required fields with default values are now present - await client.list_runtime_project_attachments(request) + jsonified_request["parent"] = "parent_value" - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_runtime_project_attachments._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -@pytest.mark.asyncio -async def test_list_runtime_project_attachments_async( - transport: str = "grpc_asyncio", - request_type=runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest, -): - client = RuntimeProjectAttachmentServiceAsyncClient( + client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request = request_type(**request_init) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_runtime_project_attachments), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_runtime_project_attachments(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = ( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest() - ) - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListRuntimeProjectAttachmentsAsyncPager) - assert response.next_page_token == "next_page_token_value" - - -@pytest.mark.asyncio -async def test_list_runtime_project_attachments_async_from_dict(): - await test_list_runtime_project_attachments_async(request_type=dict) - - -def test_list_runtime_project_attachments_field_headers(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + # Designate an appropriate value for the returned response. + return_value = ( + runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse() ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest() + response_value = Response() + response_value.status_code = 200 - request.parent = "parent_value" + # Convert return value to protobuf type + return_value = runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_runtime_project_attachments), "__call__" - ) as call: - call.return_value = ( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse() - ) - client.list_runtime_project_attachments(request) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.list_runtime_project_attachments(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -@pytest.mark.asyncio -async def test_list_runtime_project_attachments_field_headers_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_list_runtime_project_attachments_rest_unset_required_fields(): + transport = transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_runtime_project_attachments), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse() + unset_fields = ( + transport.list_runtime_project_attachments._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) ) - await client.list_runtime_project_attachments(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] + & set(("parent",)) + ) -def test_list_runtime_project_attachments_flattened(): - client = RuntimeProjectAttachmentServiceClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_runtime_project_attachments_rest_interceptors(null_interceptor): + transport = transports.RuntimeProjectAttachmentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RuntimeProjectAttachmentServiceRestInterceptor(), ) - - # Mock the actual call within the gRPC stub, and fake the request. + client = RuntimeProjectAttachmentServiceClient(transport=transport) with mock.patch.object( - type(client.transport.list_runtime_project_attachments), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = ( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RuntimeProjectAttachmentServiceRestInterceptor, + "post_list_runtime_project_attachments", + ) as post, mock.patch.object( + transports.RuntimeProjectAttachmentServiceRestInterceptor, + "pre_list_runtime_project_attachments", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest.pb( + runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse.to_json( + runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse() + ) + + request = ( + runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest() + ) + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ( runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse() ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. + client.list_runtime_project_attachments( - parent="parent_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + pre.assert_called_once() + post.assert_called_once() -def test_list_runtime_project_attachments_flattened_error(): +def test_list_runtime_project_attachments_rest_bad_request( + transport: str = "rest", + request_type=runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest, +): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_runtime_project_attachments( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest(), - parent="parent_value", - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_runtime_project_attachments(request) -@pytest.mark.asyncio -async def test_list_runtime_project_attachments_flattened_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( +def test_list_runtime_project_attachments_rest_flattened(): + client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_runtime_project_attachments), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = ( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = ( runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse() ) - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_runtime_project_attachments( + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( parent="parent_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = ( + runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_runtime_project_attachments(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/runtimeProjectAttachments" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_list_runtime_project_attachments_flattened_error_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( +def test_list_runtime_project_attachments_rest_flattened_error(transport: str = "rest"): + client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_runtime_project_attachments( + client.list_runtime_project_attachments( runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest(), parent="parent_value", ) -def test_list_runtime_project_attachments_pager(transport_name: str = "grpc"): +def test_list_runtime_project_attachments_rest_pager(transport: str = "rest"): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_runtime_project_attachments), "__call__" - ) as call: - # Set the response to a series of pages. - call.side_effect = ( + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( runtime_project_attachments=[ runtime_project_attachment_service.RuntimeProjectAttachment(), @@ -2541,22 +2204,26 @@ def test_list_runtime_project_attachments_pager(transport_name: str = "grpc"): runtime_project_attachment_service.RuntimeProjectAttachment(), ], ), - RuntimeError, ) + # Two responses for two calls + response = response + response - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_runtime_project_attachments( - request={}, retry=retry, timeout=timeout + # Wrap the values into proper Response objs + response = tuple( + runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse.to_json( + x + ) + for x in response ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + pager = client.list_runtime_project_attachments(request=sample_request) results = list(pager) assert len(results) == 6 @@ -2565,152 +2232,9 @@ def test_list_runtime_project_attachments_pager(transport_name: str = "grpc"): for i in results ) - -def test_list_runtime_project_attachments_pages(transport_name: str = "grpc"): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_runtime_project_attachments), "__call__" - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[ - runtime_project_attachment_service.RuntimeProjectAttachment(), - runtime_project_attachment_service.RuntimeProjectAttachment(), - runtime_project_attachment_service.RuntimeProjectAttachment(), - ], - next_page_token="abc", - ), - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[], - next_page_token="def", - ), - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[ - runtime_project_attachment_service.RuntimeProjectAttachment(), - ], - next_page_token="ghi", - ), - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[ - runtime_project_attachment_service.RuntimeProjectAttachment(), - runtime_project_attachment_service.RuntimeProjectAttachment(), - ], - ), - RuntimeError, + pages = list( + client.list_runtime_project_attachments(request=sample_request).pages ) - pages = list(client.list_runtime_project_attachments(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_runtime_project_attachments_async_pager(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_runtime_project_attachments), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[ - runtime_project_attachment_service.RuntimeProjectAttachment(), - runtime_project_attachment_service.RuntimeProjectAttachment(), - runtime_project_attachment_service.RuntimeProjectAttachment(), - ], - next_page_token="abc", - ), - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[], - next_page_token="def", - ), - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[ - runtime_project_attachment_service.RuntimeProjectAttachment(), - ], - next_page_token="ghi", - ), - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[ - runtime_project_attachment_service.RuntimeProjectAttachment(), - runtime_project_attachment_service.RuntimeProjectAttachment(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_runtime_project_attachments( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all( - isinstance(i, runtime_project_attachment_service.RuntimeProjectAttachment) - for i in responses - ) - - -@pytest.mark.asyncio -async def test_list_runtime_project_attachments_async_pages(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_runtime_project_attachments), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[ - runtime_project_attachment_service.RuntimeProjectAttachment(), - runtime_project_attachment_service.RuntimeProjectAttachment(), - runtime_project_attachment_service.RuntimeProjectAttachment(), - ], - next_page_token="abc", - ), - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[], - next_page_token="def", - ), - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[ - runtime_project_attachment_service.RuntimeProjectAttachment(), - ], - next_page_token="ghi", - ), - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[ - runtime_project_attachment_service.RuntimeProjectAttachment(), - runtime_project_attachment_service.RuntimeProjectAttachment(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_runtime_project_attachments(request={}) - ).pages: - pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -2722,99 +2246,43 @@ async def test_list_runtime_project_attachments_async_pages(): dict, ], ) -def test_delete_runtime_project_attachment(request_type, transport: str = "grpc"): +def test_delete_runtime_project_attachment_rest(request_type): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/runtimeProjectAttachments/sample3" + } + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_runtime_project_attachment(request) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = ( - runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest() - ) - assert args[0] == request + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_runtime_project_attachment(request) # Establish that the response is the type that we expect. assert response is None -def test_delete_runtime_project_attachment_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_runtime_project_attachment), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_runtime_project_attachment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest() - ) - - -def test_delete_runtime_project_attachment_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_runtime_project_attachment), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.delete_runtime_project_attachment(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[ - 0 - ] == runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest( - name="name_value", - ) - - -def test_delete_runtime_project_attachment_use_cached_wrapped_rpc(): +def test_delete_runtime_project_attachment_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -2835,6 +2303,7 @@ def test_delete_runtime_project_attachment_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.delete_runtime_project_attachment ] = mock_rpc + request = {} client.delete_runtime_project_attachment(request) @@ -2848,258 +2317,232 @@ def test_delete_runtime_project_attachment_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_runtime_project_attachment_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_runtime_project_attachment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest() - ) - - -@pytest.mark.asyncio -async def test_delete_runtime_project_attachment_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.delete_runtime_project_attachment - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.delete_runtime_project_attachment - ] = mock_rpc - - request = {} - await client.delete_runtime_project_attachment(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.delete_runtime_project_attachment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_delete_runtime_project_attachment_async( - transport: str = "grpc_asyncio", +def test_delete_runtime_project_attachment_rest_required_fields( request_type=runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest, ): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + transport_class = transports.RuntimeProjectAttachmentServiceRestTransport - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_runtime_project_attachment(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = ( - runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest() - ) - assert args[0] == request + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_runtime_project_attachment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the response is the type that we expect. - assert response is None + # verify required fields with default values are now present + jsonified_request["name"] = "name_value" -@pytest.mark.asyncio -async def test_delete_runtime_project_attachment_async_from_dict(): - await test_delete_runtime_project_attachment_async(request_type=dict) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_runtime_project_attachment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -def test_delete_runtime_project_attachment_field_headers(): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request = request_type(**request_init) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest() + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result - request.name = "name_value" + response_value = Response() + response_value.status_code = 200 + json_return_value = "" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_runtime_project_attachment), "__call__" - ) as call: - call.return_value = None - client.delete_runtime_project_attachment(request) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.delete_runtime_project_attachment(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -@pytest.mark.asyncio -async def test_delete_runtime_project_attachment_field_headers_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_delete_runtime_project_attachment_rest_unset_required_fields(): + transport = transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_runtime_project_attachment), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_runtime_project_attachment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + unset_fields = ( + transport.delete_runtime_project_attachment._get_unset_required_fields({}) + ) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_delete_runtime_project_attachment_flattened(): - client = RuntimeProjectAttachmentServiceClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_runtime_project_attachment_rest_interceptors(null_interceptor): + transport = transports.RuntimeProjectAttachmentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RuntimeProjectAttachmentServiceRestInterceptor(), ) - - # Mock the actual call within the gRPC stub, and fake the request. + client = RuntimeProjectAttachmentServiceClient(transport=transport) with mock.patch.object( - type(client.transport.delete_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_runtime_project_attachment( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -def test_delete_runtime_project_attachment_flattened_error(): + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RuntimeProjectAttachmentServiceRestInterceptor, + "pre_delete_runtime_project_attachment", + ) as pre: + pre.assert_not_called() + pb_message = runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest.pb( + runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = ( + runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest() + ) + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_runtime_project_attachment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_runtime_project_attachment_rest_bad_request( + transport: str = "rest", + request_type=runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest, +): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_runtime_project_attachment( - runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/runtimeProjectAttachments/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_runtime_project_attachment(request) -@pytest.mark.asyncio -async def test_delete_runtime_project_attachment_flattened_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( +def test_delete_runtime_project_attachment_rest_flattened(): + client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_runtime_project_attachment( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/runtimeProjectAttachments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_runtime_project_attachment(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/runtimeProjectAttachments/*}" + % client.transport._host, + args[1], + ) -@pytest.mark.asyncio -async def test_delete_runtime_project_attachment_flattened_error_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( +def test_delete_runtime_project_attachment_rest_flattened_error( + transport: str = "rest", +): + client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.delete_runtime_project_attachment( + client.delete_runtime_project_attachment( runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest(), name="name_value", ) +def test_delete_runtime_project_attachment_rest_error(): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -3107,33 +2550,35 @@ async def test_delete_runtime_project_attachment_flattened_error_async(): dict, ], ) -def test_lookup_runtime_project_attachment(request_type, transport: str = "grpc"): +def test_lookup_runtime_project_attachment_rest(request_type): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = ( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = ( runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() ) - response = client.lookup_runtime_project_attachment(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = ( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse.pb( + return_value ) - assert args[0] == request + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.lookup_runtime_project_attachment(request) # Establish that the response is the type that we expect. assert isinstance( @@ -3142,69 +2587,13 @@ def test_lookup_runtime_project_attachment(request_type, transport: str = "grpc" ) -def test_lookup_runtime_project_attachment_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_runtime_project_attachment), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.lookup_runtime_project_attachment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest() - ) - - -def test_lookup_runtime_project_attachment_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_runtime_project_attachment), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.lookup_runtime_project_attachment(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[ - 0 - ] == runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest( - name="name_value", - ) - - -def test_lookup_runtime_project_attachment_use_cached_wrapped_rpc(): +def test_lookup_runtime_project_attachment_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) # Should wrap all calls on client creation @@ -3225,6 +2614,7 @@ def test_lookup_runtime_project_attachment_use_cached_wrapped_rpc(): client._transport._wrapped_methods[ client._transport.lookup_runtime_project_attachment ] = mock_rpc + request = {} client.lookup_runtime_project_attachment(request) @@ -3238,216 +2628,235 @@ def test_lookup_runtime_project_attachment_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_lookup_runtime_project_attachment_empty_call_async(): - # This test is a coverage failsafe to 
make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() - ) - response = await client.lookup_runtime_project_attachment() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert ( - args[0] - == runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest() - ) - - -@pytest.mark.asyncio -async def test_lookup_runtime_project_attachment_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._client._transport.lookup_runtime_project_attachment - in client._client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[ - client._client._transport.lookup_runtime_project_attachment - ] = mock_rpc - - request = {} - await client.lookup_runtime_project_attachment(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.lookup_runtime_project_attachment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -@pytest.mark.asyncio -async def test_lookup_runtime_project_attachment_async( - transport: str = "grpc_asyncio", +def test_lookup_runtime_project_attachment_rest_required_fields( request_type=runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest, ): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + transport_class = transports.RuntimeProjectAttachmentServiceRestTransport - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() - ) - response = await client.lookup_runtime_project_attachment(request) + # verify fields with default values are dropped - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = ( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest() - ) - assert args[0] == request + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).lookup_runtime_project_attachment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the response is the type that we expect. - assert isinstance( - response, - runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse, - ) + # verify required fields with default values are now present + jsonified_request["name"] = "name_value" -@pytest.mark.asyncio -async def test_lookup_runtime_project_attachment_async_from_dict(): - await test_lookup_runtime_project_attachment_async(request_type=dict) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).lookup_runtime_project_attachment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -def test_lookup_runtime_project_attachment_field_headers(): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request = request_type(**request_init) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest() - - request.name = "name_value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.lookup_runtime_project_attachment), "__call__" - ) as call: - call.return_value = ( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() - ) - client.lookup_runtime_project_attachment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + # Designate an appropriate value for the returned response. + return_value = ( + runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() + ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -@pytest.mark.asyncio -async def test_lookup_runtime_project_attachment_field_headers_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + response = client.lookup_runtime_project_attachment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_lookup_runtime_project_attachment_rest_unset_required_fields(): + transport = transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest() + unset_fields = ( + transport.lookup_runtime_project_attachment._get_unset_required_fields({}) + ) + assert set(unset_fields) == (set(()) & set(("name",))) - request.name = "name_value" - # Mock the actual call within the gRPC stub, and fake the request. 
+@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_lookup_runtime_project_attachment_rest_interceptors(null_interceptor): + transport = transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RuntimeProjectAttachmentServiceRestInterceptor(), + ) + client = RuntimeProjectAttachmentServiceClient(transport=transport) with mock.patch.object( - type(client.transport.lookup_runtime_project_attachment), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RuntimeProjectAttachmentServiceRestInterceptor, + "post_lookup_runtime_project_attachment", + ) as post, mock.patch.object( + transports.RuntimeProjectAttachmentServiceRestInterceptor, + "pre_lookup_runtime_project_attachment", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest.pb( + runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse.to_json( runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() ) - await client.lookup_runtime_project_attachment(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = ( + runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest() + ) + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ( + runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=name_value", - ) in kw["metadata"] + client.lookup_runtime_project_attachment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() -def test_lookup_runtime_project_attachment_flattened(): +def test_lookup_runtime_project_attachment_rest_bad_request( + transport: str = "rest", + request_type=runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest, +): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = ( + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.lookup_runtime_project_attachment(request) + + +def test_lookup_runtime_project_attachment_rest_flattened(): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = ( runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.lookup_runtime_project_attachment( + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( name="name_value", ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.lookup_runtime_project_attachment(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*}:lookupRuntimeProjectAttachment" + % client.transport._host, + args[1], + ) -def test_lookup_runtime_project_attachment_flattened_error(): +def test_lookup_runtime_project_attachment_rest_flattened_error( + transport: str = "rest", +): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -3459,3853 +2868,802 @@ def test_lookup_runtime_project_attachment_flattened_error(): ) -@pytest.mark.asyncio -async def test_lookup_runtime_project_attachment_flattened_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_lookup_runtime_project_attachment_rest_error(): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_runtime_project_attachment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = ( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() - ) - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.lookup_runtime_project_attachment( - name="name_value", + + # It is an error to provide a credentials file and a transport instance. + transport = transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RuntimeProjectAttachmentServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + # It is an error to provide an api_key and a transport instance. + transport = transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RuntimeProjectAttachmentServiceClient( + client_options=options, + transport=transport, + ) + # It is an error to provide an api_key and a credential. 
+ options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RuntimeProjectAttachmentServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) -@pytest.mark.asyncio -async def test_lookup_runtime_project_attachment_flattened_error_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( + # It is an error to provide scopes and a transport instance. + transport = transports.RuntimeProjectAttachmentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. with pytest.raises(ValueError): - await client.lookup_runtime_project_attachment( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest(), - name="name_value", + client = RuntimeProjectAttachmentServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, ) +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = RuntimeProjectAttachmentServiceClient(transport=transport) + assert client.transport is transport + + @pytest.mark.parametrize( - "request_type", + "transport_class", [ - runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest, - dict, + transports.RuntimeProjectAttachmentServiceRestTransport, ], ) -def test_create_runtime_project_attachment_rest(request_type): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["runtime_project_attachment"] = { - "name": "name_value", - "runtime_project": "runtime_project_value", - "create_time": {"seconds": 751, "nanos": 543}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest.meta.fields[ - "runtime_project_attachment" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "runtime_project_attachment" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range( - 0, len(request_init["runtime_project_attachment"][field]) - ): - del 
request_init["runtime_project_attachment"][field][i][subfield] - else: - del request_init["runtime_project_attachment"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = runtime_project_attachment_service.RuntimeProjectAttachment( - name="name_value", - runtime_project="runtime_project_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = runtime_project_attachment_service.RuntimeProjectAttachment.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.create_runtime_project_attachment(request) - - # Establish that the response is the type that we expect. 
- assert isinstance( - response, runtime_project_attachment_service.RuntimeProjectAttachment - ) - assert response.name == "name_value" - assert response.runtime_project == "runtime_project_value" - - -def test_create_runtime_project_attachment_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.create_runtime_project_attachment - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.create_runtime_project_attachment - ] = mock_rpc - - request = {} - client.create_runtime_project_attachment(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_runtime_project_attachment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_runtime_project_attachment_rest_required_fields( - request_type=runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest, -): - transport_class = transports.RuntimeProjectAttachmentServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["runtime_project_attachment_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - assert "runtimeProjectAttachmentId" not in jsonified_request - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_runtime_project_attachment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "runtimeProjectAttachmentId" in jsonified_request - assert ( - jsonified_request["runtimeProjectAttachmentId"] - == request_init["runtime_project_attachment_id"] - ) - - jsonified_request["parent"] = "parent_value" - jsonified_request[ - "runtimeProjectAttachmentId" - ] = "runtime_project_attachment_id_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_runtime_project_attachment._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("runtime_project_attachment_id",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "runtimeProjectAttachmentId" in jsonified_request - assert ( - jsonified_request["runtimeProjectAttachmentId"] - == "runtime_project_attachment_id_value" - ) - - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = runtime_project_attachment_service.RuntimeProjectAttachment() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = ( - runtime_project_attachment_service.RuntimeProjectAttachment.pb( - return_value - ) - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.create_runtime_project_attachment(request) - - expected_params = [ - ( - "runtimeProjectAttachmentId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_create_runtime_project_attachment_rest_unset_required_fields(): - transport = transports.RuntimeProjectAttachmentServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = ( - transport.create_runtime_project_attachment._get_unset_required_fields({}) - ) - assert set(unset_fields) == ( - set(("runtimeProjectAttachmentId",)) - & set( - ( - "parent", - "runtimeProjectAttachmentId", - "runtimeProjectAttachment", - ) - ) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_runtime_project_attachment_rest_interceptors(null_interceptor): - transport = transports.RuntimeProjectAttachmentServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.RuntimeProjectAttachmentServiceRestInterceptor(), - ) - client = RuntimeProjectAttachmentServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - 
transports.RuntimeProjectAttachmentServiceRestInterceptor, - "post_create_runtime_project_attachment", - ) as post, mock.patch.object( - transports.RuntimeProjectAttachmentServiceRestInterceptor, - "pre_create_runtime_project_attachment", - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest.pb( - runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = ( - runtime_project_attachment_service.RuntimeProjectAttachment.to_json( - runtime_project_attachment_service.RuntimeProjectAttachment() - ) - ) - - request = ( - runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest() - ) - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = ( - runtime_project_attachment_service.RuntimeProjectAttachment() - ) - - client.create_runtime_project_attachment( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_runtime_project_attachment_rest_bad_request( - transport: str = "rest", - request_type=runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest, -): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_runtime_project_attachment(request) - - -def test_create_runtime_project_attachment_rest_flattened(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = runtime_project_attachment_service.RuntimeProjectAttachment() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - runtime_project_attachment=runtime_project_attachment_service.RuntimeProjectAttachment( - name="name_value" - ), - runtime_project_attachment_id="runtime_project_attachment_id_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = runtime_project_attachment_service.RuntimeProjectAttachment.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.create_runtime_project_attachment(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/runtimeProjectAttachments" - % client.transport._host, - args[1], - ) - - -def test_create_runtime_project_attachment_rest_flattened_error( - transport: str = "rest", -): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_runtime_project_attachment( - runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest(), - parent="parent_value", - runtime_project_attachment=runtime_project_attachment_service.RuntimeProjectAttachment( - name="name_value" - ), - runtime_project_attachment_id="runtime_project_attachment_id_value", - ) - - -def test_create_runtime_project_attachment_rest_error(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest, - dict, - ], -) -def test_get_runtime_project_attachment_rest(request_type): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/runtimeProjectAttachments/sample3" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = runtime_project_attachment_service.RuntimeProjectAttachment( - name="name_value", - runtime_project="runtime_project_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = runtime_project_attachment_service.RuntimeProjectAttachment.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_runtime_project_attachment(request) - - # Establish that the response is the type that we expect. - assert isinstance( - response, runtime_project_attachment_service.RuntimeProjectAttachment - ) - assert response.name == "name_value" - assert response.runtime_project == "runtime_project_value" - - -def test_get_runtime_project_attachment_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.get_runtime_project_attachment - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.get_runtime_project_attachment - ] = mock_rpc - - request = {} - client.get_runtime_project_attachment(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_runtime_project_attachment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_runtime_project_attachment_rest_required_fields( - request_type=runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest, -): - transport_class = transports.RuntimeProjectAttachmentServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_runtime_project_attachment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_runtime_project_attachment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = runtime_project_attachment_service.RuntimeProjectAttachment() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = ( - runtime_project_attachment_service.RuntimeProjectAttachment.pb( - return_value - ) - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_runtime_project_attachment(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_runtime_project_attachment_rest_unset_required_fields(): - transport = transports.RuntimeProjectAttachmentServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_runtime_project_attachment._get_unset_required_fields( - {} - ) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_runtime_project_attachment_rest_interceptors(null_interceptor): - transport = transports.RuntimeProjectAttachmentServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.RuntimeProjectAttachmentServiceRestInterceptor(), - ) - client = RuntimeProjectAttachmentServiceClient(transport=transport) - 
with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.RuntimeProjectAttachmentServiceRestInterceptor, - "post_get_runtime_project_attachment", - ) as post, mock.patch.object( - transports.RuntimeProjectAttachmentServiceRestInterceptor, - "pre_get_runtime_project_attachment", - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = ( - runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest.pb( - runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest() - ) - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = ( - runtime_project_attachment_service.RuntimeProjectAttachment.to_json( - runtime_project_attachment_service.RuntimeProjectAttachment() - ) - ) - - request = ( - runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest() - ) - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = ( - runtime_project_attachment_service.RuntimeProjectAttachment() - ) - - client.get_runtime_project_attachment( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_runtime_project_attachment_rest_bad_request( - transport: str = "rest", - request_type=runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest, -): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/runtimeProjectAttachments/sample3" - } - request 
= request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_runtime_project_attachment(request) - - -def test_get_runtime_project_attachment_rest_flattened(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = runtime_project_attachment_service.RuntimeProjectAttachment() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/runtimeProjectAttachments/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = runtime_project_attachment_service.RuntimeProjectAttachment.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.get_runtime_project_attachment(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/runtimeProjectAttachments/*}" - % client.transport._host, - args[1], - ) - - -def test_get_runtime_project_attachment_rest_flattened_error(transport: str = "rest"): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_runtime_project_attachment( - runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest(), - name="name_value", - ) - - -def test_get_runtime_project_attachment_rest_error(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest, - dict, - ], -) -def test_list_runtime_project_attachments_rest(request_type): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = ( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - next_page_token="next_page_token_value", - ) - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = ( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse.pb( - return_value - ) - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_runtime_project_attachments(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListRuntimeProjectAttachmentsPager) - assert response.next_page_token == "next_page_token_value" - - -def test_list_runtime_project_attachments_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.list_runtime_project_attachments - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.list_runtime_project_attachments - ] = mock_rpc - - request = {} - client.list_runtime_project_attachments(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_runtime_project_attachments(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_runtime_project_attachments_rest_required_fields( - request_type=runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest, -): - transport_class = transports.RuntimeProjectAttachmentServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_runtime_project_attachments._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_runtime_project_attachments._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. 
- return_value = ( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse() - ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_runtime_project_attachments(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_runtime_project_attachments_rest_unset_required_fields(): - transport = transports.RuntimeProjectAttachmentServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = ( - transport.list_runtime_project_attachments._get_unset_required_fields({}) - ) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_runtime_project_attachments_rest_interceptors(null_interceptor): - transport = 
transports.RuntimeProjectAttachmentServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.RuntimeProjectAttachmentServiceRestInterceptor(), - ) - client = RuntimeProjectAttachmentServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.RuntimeProjectAttachmentServiceRestInterceptor, - "post_list_runtime_project_attachments", - ) as post, mock.patch.object( - transports.RuntimeProjectAttachmentServiceRestInterceptor, - "pre_list_runtime_project_attachments", - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest.pb( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse.to_json( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse() - ) - - request = ( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest() - ) - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = ( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse() - ) - - client.list_runtime_project_attachments( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_runtime_project_attachments_rest_bad_request( - transport: str = "rest", - 
request_type=runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest, -): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_runtime_project_attachments(request) - - -def test_list_runtime_project_attachments_rest_flattened(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = ( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse() - ) - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = ( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse.pb( - return_value - ) - ) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_runtime_project_attachments(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/runtimeProjectAttachments" - % client.transport._host, - args[1], - ) - - -def test_list_runtime_project_attachments_rest_flattened_error(transport: str = "rest"): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_runtime_project_attachments( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest(), - parent="parent_value", - ) - - -def test_list_runtime_project_attachments_rest_pager(transport: str = "rest"): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[ - runtime_project_attachment_service.RuntimeProjectAttachment(), - runtime_project_attachment_service.RuntimeProjectAttachment(), - runtime_project_attachment_service.RuntimeProjectAttachment(), - ], - next_page_token="abc", - ), - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[], - next_page_token="def", - ), - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[ - runtime_project_attachment_service.RuntimeProjectAttachment(), - ], - next_page_token="ghi", - ), - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse( - runtime_project_attachments=[ - runtime_project_attachment_service.RuntimeProjectAttachment(), - runtime_project_attachment_service.RuntimeProjectAttachment(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse.to_json( - x - ) - for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_runtime_project_attachments(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all( - isinstance(i, runtime_project_attachment_service.RuntimeProjectAttachment) - for i in results - ) - - pages = 
list( - client.list_runtime_project_attachments(request=sample_request).pages - ) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest, - dict, - ], -) -def test_delete_runtime_project_attachment_rest(request_type): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/runtimeProjectAttachments/sample3" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_runtime_project_attachment(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_runtime_project_attachment_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.delete_runtime_project_attachment - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.delete_runtime_project_attachment - ] = mock_rpc - - request = {} - client.delete_runtime_project_attachment(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_runtime_project_attachment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_runtime_project_attachment_rest_required_fields( - request_type=runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest, -): - transport_class = transports.RuntimeProjectAttachmentServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_runtime_project_attachment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_runtime_project_attachment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_runtime_project_attachment(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_delete_runtime_project_attachment_rest_unset_required_fields(): - transport = transports.RuntimeProjectAttachmentServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = ( - transport.delete_runtime_project_attachment._get_unset_required_fields({}) - ) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_runtime_project_attachment_rest_interceptors(null_interceptor): - transport = transports.RuntimeProjectAttachmentServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.RuntimeProjectAttachmentServiceRestInterceptor(), - ) - client = RuntimeProjectAttachmentServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - 
transports.RuntimeProjectAttachmentServiceRestInterceptor, - "pre_delete_runtime_project_attachment", - ) as pre: - pre.assert_not_called() - pb_message = runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest.pb( - runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = ( - runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest() - ) - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_runtime_project_attachment( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - - -def test_delete_runtime_project_attachment_rest_bad_request( - transport: str = "rest", - request_type=runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest, -): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/runtimeProjectAttachments/sample3" - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_runtime_project_attachment(request) - - -def test_delete_runtime_project_attachment_rest_flattened(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/runtimeProjectAttachments/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.delete_runtime_project_attachment(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/runtimeProjectAttachments/*}" - % client.transport._host, - args[1], - ) - - -def test_delete_runtime_project_attachment_rest_flattened_error( - transport: str = "rest", -): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_runtime_project_attachment( - runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest(), - name="name_value", - ) - - -def test_delete_runtime_project_attachment_rest_error(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest, - dict, - ], -) -def test_lookup_runtime_project_attachment_rest(request_type): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = ( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.lookup_runtime_project_attachment(request) - - # Establish that the response is the type that we expect. 
- assert isinstance( - response, - runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse, - ) - - -def test_lookup_runtime_project_attachment_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.lookup_runtime_project_attachment - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.lookup_runtime_project_attachment - ] = mock_rpc - - request = {} - client.lookup_runtime_project_attachment(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.lookup_runtime_project_attachment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_lookup_runtime_project_attachment_rest_required_fields( - request_type=runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest, -): - transport_class = transports.RuntimeProjectAttachmentServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).lookup_runtime_project_attachment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).lookup_runtime_project_attachment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = ( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() - ) - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.lookup_runtime_project_attachment(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_lookup_runtime_project_attachment_rest_unset_required_fields(): - transport = transports.RuntimeProjectAttachmentServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = ( - transport.lookup_runtime_project_attachment._get_unset_required_fields({}) - ) - assert set(unset_fields) == (set(()) & set(("name",))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_lookup_runtime_project_attachment_rest_interceptors(null_interceptor): - transport = transports.RuntimeProjectAttachmentServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.RuntimeProjectAttachmentServiceRestInterceptor(), - ) - client = 
RuntimeProjectAttachmentServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.RuntimeProjectAttachmentServiceRestInterceptor, - "post_lookup_runtime_project_attachment", - ) as post, mock.patch.object( - transports.RuntimeProjectAttachmentServiceRestInterceptor, - "pre_lookup_runtime_project_attachment", - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest.pb( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse.to_json( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() - ) - - request = ( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest() - ) - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = ( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() - ) - - client.lookup_runtime_project_attachment( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_lookup_runtime_project_attachment_rest_bad_request( - transport: str = "rest", - request_type=runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest, -): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - 
request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.lookup_runtime_project_attachment(request) - - -def test_lookup_runtime_project_attachment_rest_flattened(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = ( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() - ) - - # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.lookup_runtime_project_attachment(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*}:lookupRuntimeProjectAttachment" - % client.transport._host, - args[1], - ) - - -def test_lookup_runtime_project_attachment_rest_flattened_error( - transport: str = "rest", -): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.lookup_runtime_project_attachment( - runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest(), - name="name_value", - ) - - -def test_lookup_runtime_project_attachment_rest_error(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.RuntimeProjectAttachmentServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.RuntimeProjectAttachmentServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = RuntimeProjectAttachmentServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. 
- transport = transports.RuntimeProjectAttachmentServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = RuntimeProjectAttachmentServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = RuntimeProjectAttachmentServiceClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.RuntimeProjectAttachmentServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = RuntimeProjectAttachmentServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.RuntimeProjectAttachmentServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = RuntimeProjectAttachmentServiceClient(transport=transport) - assert client.transport is transport - - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.RuntimeProjectAttachmentServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.RuntimeProjectAttachmentServiceGrpcTransport, - transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport, - transports.RuntimeProjectAttachmentServiceRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "rest", - ], -) -def test_transport_kind(transport_name): - transport = RuntimeProjectAttachmentServiceClient.get_transport_class( - transport_name - )( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.RuntimeProjectAttachmentServiceGrpcTransport, - ) - - -def test_runtime_project_attachment_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.RuntimeProjectAttachmentServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", - ) - - -def test_runtime_project_attachment_service_base_transport(): - # Instantiate the base transport. 
- with mock.patch( - "google.cloud.apihub_v1.services.runtime_project_attachment_service.transports.RuntimeProjectAttachmentServiceTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.RuntimeProjectAttachmentServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - "create_runtime_project_attachment", - "get_runtime_project_attachment", - "list_runtime_project_attachments", - "delete_runtime_project_attachment", - "lookup_runtime_project_attachment", - "get_location", - "list_locations", - "get_operation", - "cancel_operation", - "delete_operation", - "list_operations", - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - "kind", - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_runtime_project_attachment_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.apihub_v1.services.runtime_project_attachment_service.transports.RuntimeProjectAttachmentServiceTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.RuntimeProjectAttachmentServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -def 
test_runtime_project_attachment_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.cloud.apihub_v1.services.runtime_project_attachment_service.transports.RuntimeProjectAttachmentServiceTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.RuntimeProjectAttachmentServiceTransport() - adc.assert_called_once() - - -def test_runtime_project_attachment_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - RuntimeProjectAttachmentServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.RuntimeProjectAttachmentServiceGrpcTransport, - transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport, - ], -) -def test_runtime_project_attachment_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.RuntimeProjectAttachmentServiceGrpcTransport, - transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport, - transports.RuntimeProjectAttachmentServiceRestTransport, - ], -) -def test_runtime_project_attachment_service_transport_auth_gdch_credentials( - transport_class, -): - host = "https://language.com" - api_audience_tests = [None, "https://language2.com"] - api_audience_expect = [host, "https://language2.com"] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, "default", autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock( - return_value=gdch_mock - ) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with(e) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.RuntimeProjectAttachmentServiceGrpcTransport, grpc_helpers), - ( - transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport, - grpc_helpers_async, - ), - ], -) -def test_runtime_project_attachment_service_transport_create_channel( - transport_class, grpc_helpers -): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - - create_channel.assert_called_with( - "apihub.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=["1", "2"], - default_host="apihub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.RuntimeProjectAttachmentServiceGrpcTransport, - transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport, - ], -) -def test_runtime_project_attachment_service_grpc_transport_client_cert_source_for_mtls( - transport_class, -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds, - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback, - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, private_key=expected_key - ) - - -def test_runtime_project_attachment_service_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch( - "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" - ) as mock_configure_mtls_channel: - transports.RuntimeProjectAttachmentServiceRestTransport( - credentials=cred, client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_runtime_project_attachment_service_host_no_port(transport_name): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="apihub.googleapis.com" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "apihub.googleapis.com:443" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://apihub.googleapis.com" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_runtime_project_attachment_service_host_with_port(transport_name): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="apihub.googleapis.com:8000" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "apihub.googleapis.com:8000" - if transport_name in ["grpc", 
"grpc_asyncio"] - else "https://apihub.googleapis.com:8000" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "rest", - ], -) -def test_runtime_project_attachment_service_client_transport_session_collision( - transport_name, -): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = RuntimeProjectAttachmentServiceClient( - credentials=creds1, - transport=transport_name, - ) - client2 = RuntimeProjectAttachmentServiceClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.create_runtime_project_attachment._session - session2 = client2.transport.create_runtime_project_attachment._session - assert session1 != session2 - session1 = client1.transport.get_runtime_project_attachment._session - session2 = client2.transport.get_runtime_project_attachment._session - assert session1 != session2 - session1 = client1.transport.list_runtime_project_attachments._session - session2 = client2.transport.list_runtime_project_attachments._session - assert session1 != session2 - session1 = client1.transport.delete_runtime_project_attachment._session - session2 = client2.transport.delete_runtime_project_attachment._session - assert session1 != session2 - session1 = client1.transport.lookup_runtime_project_attachment._session - session2 = client2.transport.lookup_runtime_project_attachment._session - assert session1 != session2 - - -def test_runtime_project_attachment_service_grpc_transport_channel(): - channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.RuntimeProjectAttachmentServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_runtime_project_attachment_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize( - "transport_class", - [ - transports.RuntimeProjectAttachmentServiceGrpcTransport, - transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport, - ], -) -def test_runtime_project_attachment_service_transport_channel_mtls_with_client_cert_source( - transport_class, -): - with mock.patch( - "grpc.ssl_channel_credentials", autospec=True - ) as grpc_ssl_channel_cred: - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - 
certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize( - "transport_class", - [ - transports.RuntimeProjectAttachmentServiceGrpcTransport, - transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport, - ], -) -def test_runtime_project_attachment_service_transport_channel_mtls_with_adc( - transport_class, -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_runtime_project_attachment_path(): - project = "squid" - location = "clam" - 
runtime_project_attachment = "whelk" - expected = "projects/{project}/locations/{location}/runtimeProjectAttachments/{runtime_project_attachment}".format( - project=project, - location=location, - runtime_project_attachment=runtime_project_attachment, - ) - actual = RuntimeProjectAttachmentServiceClient.runtime_project_attachment_path( - project, location, runtime_project_attachment - ) - assert expected == actual - - -def test_parse_runtime_project_attachment_path(): - expected = { - "project": "octopus", - "location": "oyster", - "runtime_project_attachment": "nudibranch", - } - path = RuntimeProjectAttachmentServiceClient.runtime_project_attachment_path( - **expected - ) - - # Check that the path construction is reversible. - actual = ( - RuntimeProjectAttachmentServiceClient.parse_runtime_project_attachment_path( - path - ) - ) - assert expected == actual - - -def test_common_billing_account_path(): - billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format( - billing_account=billing_account, - ) - actual = RuntimeProjectAttachmentServiceClient.common_billing_account_path( - billing_account - ) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "mussel", - } - path = RuntimeProjectAttachmentServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = RuntimeProjectAttachmentServiceClient.parse_common_billing_account_path( - path - ) - assert expected == actual - - -def test_common_folder_path(): - folder = "winkle" - expected = "folders/{folder}".format( - folder=folder, - ) - actual = RuntimeProjectAttachmentServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nautilus", - } - path = RuntimeProjectAttachmentServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. 
- actual = RuntimeProjectAttachmentServiceClient.parse_common_folder_path(path) - assert expected == actual - - -def test_common_organization_path(): - organization = "scallop" - expected = "organizations/{organization}".format( - organization=organization, - ) - actual = RuntimeProjectAttachmentServiceClient.common_organization_path( - organization - ) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "abalone", - } - path = RuntimeProjectAttachmentServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = RuntimeProjectAttachmentServiceClient.parse_common_organization_path(path) - assert expected == actual - - -def test_common_project_path(): - project = "squid" - expected = "projects/{project}".format( - project=project, - ) - actual = RuntimeProjectAttachmentServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "clam", - } - path = RuntimeProjectAttachmentServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = RuntimeProjectAttachmentServiceClient.parse_common_project_path(path) - assert expected == actual - - -def test_common_location_path(): - project = "whelk" - location = "octopus" - expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, - ) - actual = RuntimeProjectAttachmentServiceClient.common_location_path( - project, location - ) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - } - path = RuntimeProjectAttachmentServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = RuntimeProjectAttachmentServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object( - transports.RuntimeProjectAttachmentServiceTransport, "_prep_wrapped_messages" - ) as prep: - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object( - transports.RuntimeProjectAttachmentServiceTransport, "_prep_wrapped_messages" - ) as prep: - transport_class = RuntimeProjectAttachmentServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_get_location_rest_bad_request( - transport: str = "rest", request_type=locations_pb2.GetLocationRequest -): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_location(request) - - -@pytest.mark.parametrize( - "request_type", - [ - locations_pb2.GetLocationRequest, - dict, - ], -) -def test_get_location_rest(request_type): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.Location() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_location(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -def test_list_locations_rest_bad_request( - transport: str = "rest", request_type=locations_pb2.ListLocationsRequest -): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({"name": "projects/sample1"}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_locations(request) - - -@pytest.mark.parametrize( - "request_type", - [ - locations_pb2.ListLocationsRequest, - dict, - ], -) -def test_list_locations_rest(request_type): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.ListLocationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_locations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - - -def test_cancel_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.CancelOperationRequest -): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.CancelOperationRequest, - dict, - ], -) -def test_cancel_operation_rest(request_type): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest -): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.DeleteOperationRequest, - dict, - ], -) -def test_delete_operation_rest(request_type): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_get_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.GetOperationRequest -): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.GetOperationRequest, - dict, - ], -) -def test_get_operation_rest(request_type): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_list_operations_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.ListOperationsRequest -): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_operations(request) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() @pytest.mark.parametrize( - "request_type", + "transport_name", [ - operations_pb2.ListOperationsRequest, - dict, + "rest", ], ) -def test_list_operations_rest(request_type): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.list_operations(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.ListOperationsResponse) - - -def test_delete_operation(transport: str = "grpc"): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = RuntimeProjectAttachmentServiceAsyncClient( +def test_transport_kind(transport_name): + transport = RuntimeProjectAttachmentServiceClient.get_transport_class( + transport_name + )( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) + assert transport.kind == transport_name - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_runtime_project_attachment_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.RuntimeProjectAttachmentServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) - # Establish that the response is the type that we expect. - assert response is None +def test_runtime_project_attachment_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.apihub_v1.services.runtime_project_attachment_service.transports.RuntimeProjectAttachmentServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.RuntimeProjectAttachmentServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) -def test_delete_operation_field_headers(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "create_runtime_project_attachment", + "get_runtime_project_attachment", + "list_runtime_project_attachments", + "delete_runtime_project_attachment", + "lookup_runtime_project_attachment", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - + with pytest.raises(NotImplementedError): + transport.close() -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_runtime_project_attachment_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.apihub_v1.services.runtime_project_attachment_service.transports.RuntimeProjectAttachmentServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RuntimeProjectAttachmentServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] +def test_runtime_project_attachment_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.apihub_v1.services.runtime_project_attachment_service.transports.RuntimeProjectAttachmentServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RuntimeProjectAttachmentServiceTransport() + adc.assert_called_once() -def test_delete_operation_from_dict(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation( - request={ - "name": "locations", - } +def test_runtime_project_attachment_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + RuntimeProjectAttachmentServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_operation( - request={ - "name": "locations", - } +def test_runtime_project_attachment_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.RuntimeProjectAttachmentServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback ) - call.assert_called() + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) -def test_cancel_operation(transport: str = "grpc"): +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_runtime_project_attachment_service_host_no_port(transport_name): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com" + ), + transport=transport_name, ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert response is None - - -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + assert client.transport._host == ( + "apihub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com" ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_cancel_operation_field_headers(): +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_runtime_project_attachment_service_host_with_port(transport_name): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="apihub.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "apihub.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://apihub.googleapis.com:8000" ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_runtime_project_attachment_service_client_transport_session_collision( + transport_name, +): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = RuntimeProjectAttachmentServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = RuntimeProjectAttachmentServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_runtime_project_attachment._session + session2 = client2.transport.create_runtime_project_attachment._session + assert session1 != session2 + session1 = client1.transport.get_runtime_project_attachment._session + session2 = client2.transport.get_runtime_project_attachment._session + assert session1 != session2 + session1 = client1.transport.list_runtime_project_attachments._session + session2 = client2.transport.list_runtime_project_attachments._session + assert session1 != session2 + session1 = client1.transport.delete_runtime_project_attachment._session + session2 = client2.transport.delete_runtime_project_attachment._session + assert session1 != session2 + session1 = client1.transport.lookup_runtime_project_attachment._session + session2 = client2.transport.lookup_runtime_project_attachment._session + assert session1 != session2 
-@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_runtime_project_attachment_path(): + project = "squid" + location = "clam" + runtime_project_attachment = "whelk" + expected = "projects/{project}/locations/{location}/runtimeProjectAttachments/{runtime_project_attachment}".format( + project=project, + location=location, + runtime_project_attachment=runtime_project_attachment, + ) + actual = RuntimeProjectAttachmentServiceClient.runtime_project_attachment_path( + project, location, runtime_project_attachment ) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_runtime_project_attachment_path(): + expected = { + "project": "octopus", + "location": "oyster", + "runtime_project_attachment": "nudibranch", + } + path = RuntimeProjectAttachmentServiceClient.runtime_project_attachment_path( + **expected + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. 
+ actual = ( + RuntimeProjectAttachmentServiceClient.parse_runtime_project_attachment_path( + path + ) + ) + assert expected == actual -def test_cancel_operation_from_dict(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = RuntimeProjectAttachmentServiceClient.common_billing_account_path( + billing_account ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None + assert expected == actual - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = RuntimeProjectAttachmentServiceClient.common_billing_account_path(**expected) -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + # Check that the path construction is reversible. + actual = RuntimeProjectAttachmentServiceClient.parse_common_billing_account_path( + path ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() + assert expected == actual -def test_get_operation(transport: str = "grpc"): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format( + folder=folder, ) + actual = RuntimeProjectAttachmentServiceClient.common_folder_path(folder) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = RuntimeProjectAttachmentServiceClient.common_folder_path(**expected) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) + # Check that the path construction is reversible. 
+ actual = RuntimeProjectAttachmentServiceClient.parse_common_folder_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = RuntimeProjectAttachmentServiceClient.common_organization_path( + organization ) + assert expected == actual - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = RuntimeProjectAttachmentServiceClient.common_organization_path(**expected) - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) + # Check that the path construction is reversible. 
+ actual = RuntimeProjectAttachmentServiceClient.parse_common_organization_path(path) + assert expected == actual -def test_get_operation_field_headers(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, ) + actual = RuntimeProjectAttachmentServiceClient.common_project_path(project) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = RuntimeProjectAttachmentServiceClient.common_project_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. 
+ actual = RuntimeProjectAttachmentServiceClient.parse_common_project_path(path) + assert expected == actual -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = RuntimeProjectAttachmentServiceClient.common_location_path( + project, location ) + assert expected == actual - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = RuntimeProjectAttachmentServiceClient.common_location_path(**expected) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Check that the path construction is reversible. + actual = RuntimeProjectAttachmentServiceClient.parse_common_location_path(path) + assert expected == actual -def test_get_operation_from_dict(): - client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() - response = client.get_operation( - request={ - "name": "locations", - } + with mock.patch.object( + transports.RuntimeProjectAttachmentServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = RuntimeProjectAttachmentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) - call.assert_called() - + prep.assert_called_once_with(client_info) -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } + with mock.patch.object( + transports.RuntimeProjectAttachmentServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = RuntimeProjectAttachmentServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) - call.assert_called() + prep.assert_called_once_with(client_info) -def test_list_operations(transport: str = "grpc"): +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.ListOperationsResponse) - - -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) -def test_list_operations_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.get_location(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) -def test_list_operations_from_dict(): +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) -def test_list_locations(transport: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, locations_pb2.ListLocationsResponse) -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc_asyncio"): - client = RuntimeProjectAttachmentServiceAsyncClient( +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) -def test_list_locations_field_headers(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = locations_pb2.ListLocationsRequest() - request.name = "locations" + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response = client.cancel_operation(request) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Establish that the response is the type that we expect. + assert response is None -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): + client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) -def test_list_locations_from_dict(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_operation(request) -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() + # Establish that the response is the type that we expect. + assert response is None -def test_get_location(transport: str = "grpc"): +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) -@pytest.mark.asyncio -async def test_get_location_async(transport: str = "grpc_asyncio"): - client = RuntimeProjectAttachmentServiceAsyncClient( +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = locations_pb2.GetLocationRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) + assert isinstance(response, operations_pb2.Operation) -def test_get_location_field_headers(): +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): client = RuntimeProjectAttachmentServiceClient( - credentials=ga_credentials.AnonymousCredentials() + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = locations_pb2.Location() - - client.get_location(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations/abc", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_location_field_headers_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials() + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations/abc", - ) in kw["metadata"] + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) -def test_get_location_from_dict(): +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): client = RuntimeProjectAttachmentServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() - response = client.get_location( - request={ - "name": "locations/abc", - } - ) - call.assert_called() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value -@pytest.mark.asyncio -async def test_get_location_from_dict_async(): - client = RuntimeProjectAttachmentServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location( - request={ - "name": "locations", - } - ) - call.assert_called() + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) def test_transport_close(): transports = { "rest": "_session", - "grpc": "_grpc_channel", } for transport, close_name in transports.items(): @@ -7323,7 +3681,6 @@ def test_transport_close(): def test_client_ctx(): transports = [ "rest", - "grpc", ] for transport in transports: client = RuntimeProjectAttachmentServiceClient( @@ -7342,11 +3699,7 @@ def test_client_ctx(): [ ( RuntimeProjectAttachmentServiceClient, - transports.RuntimeProjectAttachmentServiceGrpcTransport, - ), - ( - RuntimeProjectAttachmentServiceAsyncClient, - transports.RuntimeProjectAttachmentServiceGrpcAsyncIOTransport, + transports.RuntimeProjectAttachmentServiceRestTransport, ), ], ) diff --git a/packages/google-cloud-asset/CHANGELOG.md b/packages/google-cloud-asset/CHANGELOG.md index a038645b6f8a..733c3b3086d1 100644 --- a/packages/google-cloud-asset/CHANGELOG.md +++ b/packages/google-cloud-asset/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-asset/#history +## [3.26.4](https://github.com/googleapis/google-cloud-python/compare/google-cloud-asset-v3.26.3...google-cloud-asset-v3.26.4) (2024-09-16) + + +### Documentation + +* [google-cloud-asset] Comments are clarified for certain fields in messages `QueryAssetsResponse` and `ResourceSearchResult` ([#13076](https://github.com/googleapis/google-cloud-python/issues/13076)) ([35b2c45](https://github.com/googleapis/google-cloud-python/commit/35b2c456c6791bc47ffe894f3ef966558cb6c98e)) + ## 
[3.26.3](https://github.com/googleapis/google-cloud-python/compare/google-cloud-asset-v3.26.2...google-cloud-asset-v3.26.3) (2024-07-30) diff --git a/packages/google-cloud-asset/google/cloud/asset/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset/gapic_version.py index 558c8aab67c5..fdbe0bc4679f 100644 --- a/packages/google-cloud-asset/google/cloud/asset/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "3.26.4" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset_v1/gapic_version.py index 558c8aab67c5..fdbe0bc4679f 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "3.26.4" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1/types/asset_service.py b/packages/google-cloud-asset/google/cloud/asset_v1/types/asset_service.py index 88d2a833272d..1d09f9e767c6 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1/types/asset_service.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1/types/asset_service.py @@ -2780,9 +2780,11 @@ class QueryAssetsResponse(proto.Message): valid ``response``. If ``done`` == ``false`` and the query result is being saved - in a output, the output_config field will be set. If + in an output, the output_config field will be set. If ``done`` == ``true``, exactly one of ``error``, - ``query_result`` or ``output_config`` will be set. 
+ ``query_result`` or ``output_config`` will be set. [done] is + unset unless the [QueryAssetsResponse] contains a + [QueryAssetsResponse.job_reference]. error (google.rpc.status_pb2.Status): Error status. @@ -2792,10 +2794,10 @@ class QueryAssetsResponse(proto.Message): This field is a member of `oneof`_ ``response``. output_config (google.cloud.asset_v1.types.QueryAssetsOutputConfig): - Output configuration which indicates instead - of being returned in API response on the fly, - the query result will be saved in a specific - output. + Output configuration, which indicates that + instead of being returned in an API response on + the fly, the query result will be saved in a + specific output. This field is a member of `oneof`_ ``response``. """ diff --git a/packages/google-cloud-asset/google/cloud/asset_v1/types/assets.py b/packages/google-cloud-asset/google/cloud/asset_v1/types/assets.py index da13dc114c9b..d31228b6e04c 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1/types/assets.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1/types/assets.py @@ -602,7 +602,7 @@ class EffectiveTagDetails(proto.Message): class ResourceSearchResult(proto.Message): r"""A result of Resource Search, containing information of a - cloud resource. Next ID: 34 + cloud resource. Attributes: name (str): @@ -684,8 +684,8 @@ class ResourceSearchResult(proto.Message): - Use a field query. Example: ``location:us-west*`` - Use a free text query. Example: ``us-west*`` labels (MutableMapping[str, str]): - Labels associated with this resource. See `Labelling and - grouping Google Cloud + User labels associated with this resource. See `Labelling + and grouping Google Cloud resources `__ for more information. This field is available only when the resource's Protobuf contains it. 
diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/gapic_version.py index 558c8aab67c5..fdbe0bc4679f 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "3.26.4" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/gapic_version.py index 558c8aab67c5..fdbe0bc4679f 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "3.26.4" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p4beta1/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset_v1p4beta1/gapic_version.py index e6f4a2765144..d0552744bb07 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p4beta1/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p4beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "3.26.3" # {x-release-please-version} +__version__ = "3.26.4" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/gapic_version.py index 558c8aab67c5..fdbe0bc4679f 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "3.26.4" # {x-release-please-version} diff --git a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json index 5a90dfa88b31..ada630458cc7 100644 --- a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json +++ b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-asset", - "version": "0.1.0" + "version": "3.26.4" }, "snippets": [ { diff --git a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p1beta1.json b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p1beta1.json index dcbeb822733b..d088663067e5 100644 --- a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p1beta1.json +++ b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-asset", - "version": "0.1.0" + "version": "3.26.4" }, "snippets": [ { diff --git 
a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p2beta1.json b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p2beta1.json index 38eaede856da..6af620ebaf84 100644 --- a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p2beta1.json +++ b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p2beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-asset", - "version": "0.1.0" + "version": "3.26.4" }, "snippets": [ { diff --git a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p5beta1.json b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p5beta1.json index 13ffc229e3a7..1d6c2a34e3ae 100644 --- a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p5beta1.json +++ b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p5beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-asset", - "version": "0.1.0" + "version": "3.26.4" }, "snippets": [ { diff --git a/packages/google-cloud-backupdr/CHANGELOG.md b/packages/google-cloud-backupdr/CHANGELOG.md index 52580c971528..55f275c138f5 100644 --- a/packages/google-cloud-backupdr/CHANGELOG.md +++ b/packages/google-cloud-backupdr/CHANGELOG.md @@ -1,5 +1,26 @@ # Changelog +## [0.1.4](https://github.com/googleapis/google-cloud-python/compare/google-cloud-backupdr-v0.1.3...google-cloud-backupdr-v0.1.4) (2024-10-08) + + +### Features + +* [google-cloud-backupdr] Client library for the backupvault api is added ([27c262d](https://github.com/googleapis/google-cloud-python/commit/27c262d51c5d9f055152d9448f5fb6759da4bdb3)) +* Add backupplan proto ([27c262d](https://github.com/googleapis/google-cloud-python/commit/27c262d51c5d9f055152d9448f5fb6759da4bdb3)) +* Add 
backupplanassociation proto ([27c262d](https://github.com/googleapis/google-cloud-python/commit/27c262d51c5d9f055152d9448f5fb6759da4bdb3)) +* Add backupvault_ba proto ([27c262d](https://github.com/googleapis/google-cloud-python/commit/27c262d51c5d9f055152d9448f5fb6759da4bdb3)) +* Add backupvault_gce proto ([27c262d](https://github.com/googleapis/google-cloud-python/commit/27c262d51c5d9f055152d9448f5fb6759da4bdb3)) + + +### Documentation + +* A comment for field `management_servers` in message `.google.cloud.backupdr.v1.ListManagementServersResponse` is changed ([27c262d](https://github.com/googleapis/google-cloud-python/commit/27c262d51c5d9f055152d9448f5fb6759da4bdb3)) +* A comment for field `name` in message `.google.cloud.backupdr.v1.GetManagementServerRequest` is changed ([27c262d](https://github.com/googleapis/google-cloud-python/commit/27c262d51c5d9f055152d9448f5fb6759da4bdb3)) +* A comment for field `oauth2_client_id` in message `.google.cloud.backupdr.v1.ManagementServer` is changed ([27c262d](https://github.com/googleapis/google-cloud-python/commit/27c262d51c5d9f055152d9448f5fb6759da4bdb3)) +* A comment for field `parent` in message `.google.cloud.backupdr.v1.CreateManagementServerRequest` is changed ([27c262d](https://github.com/googleapis/google-cloud-python/commit/27c262d51c5d9f055152d9448f5fb6759da4bdb3)) +* A comment for field `parent` in message `.google.cloud.backupdr.v1.ListManagementServersRequest` is changed ([27c262d](https://github.com/googleapis/google-cloud-python/commit/27c262d51c5d9f055152d9448f5fb6759da4bdb3)) +* A comment for field `requested_cancellation` in message `.google.cloud.backupdr.v1.OperationMetadata` is changed ([27c262d](https://github.com/googleapis/google-cloud-python/commit/27c262d51c5d9f055152d9448f5fb6759da4bdb3)) + ## [0.1.3](https://github.com/googleapis/google-cloud-python/compare/google-cloud-backupdr-v0.1.2...google-cloud-backupdr-v0.1.3) (2024-07-30) diff --git 
a/packages/google-cloud-backupdr/google/cloud/backupdr/__init__.py b/packages/google-cloud-backupdr/google/cloud/backupdr/__init__.py index c29b14037f34..5ab4e805a40b 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr/__init__.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr/__init__.py @@ -33,6 +33,94 @@ WorkforceIdentityBasedManagementURI, WorkforceIdentityBasedOAuth2ClientID, ) +from google.cloud.backupdr_v1.types.backupplan import ( + BackupPlan, + BackupRule, + BackupWindow, + CreateBackupPlanRequest, + DeleteBackupPlanRequest, + GetBackupPlanRequest, + ListBackupPlansRequest, + ListBackupPlansResponse, + StandardSchedule, + WeekDayOfMonth, +) +from google.cloud.backupdr_v1.types.backupplanassociation import ( + BackupPlanAssociation, + CreateBackupPlanAssociationRequest, + DeleteBackupPlanAssociationRequest, + GetBackupPlanAssociationRequest, + ListBackupPlanAssociationsRequest, + ListBackupPlanAssociationsResponse, + RuleConfigInfo, + TriggerBackupRequest, +) +from google.cloud.backupdr_v1.types.backupvault import ( + Backup, + BackupApplianceBackupConfig, + BackupApplianceLockInfo, + BackupConfigInfo, + BackupConfigState, + BackupLock, + BackupVault, + BackupVaultView, + BackupView, + CreateBackupVaultRequest, + DataSource, + DataSourceBackupApplianceApplication, + DataSourceGcpResource, + DeleteBackupRequest, + DeleteBackupVaultRequest, + FetchUsableBackupVaultsRequest, + FetchUsableBackupVaultsResponse, + GcpBackupConfig, + GcpResource, + GetBackupRequest, + GetBackupVaultRequest, + GetDataSourceRequest, + ListBackupsRequest, + ListBackupsResponse, + ListBackupVaultsRequest, + ListBackupVaultsResponse, + ListDataSourcesRequest, + ListDataSourcesResponse, + RestoreBackupRequest, + RestoreBackupResponse, + ServiceLockInfo, + TargetResource, + UpdateBackupRequest, + UpdateBackupVaultRequest, + UpdateDataSourceRequest, +) +from google.cloud.backupdr_v1.types.backupvault_ba import ( + BackupApplianceBackupProperties, +) +from 
google.cloud.backupdr_v1.types.backupvault_gce import ( + AcceleratorConfig, + AccessConfig, + AdvancedMachineFeatures, + AliasIpRange, + AllocationAffinity, + AttachedDisk, + ComputeInstanceBackupProperties, + ComputeInstanceDataSourceProperties, + ComputeInstanceRestoreProperties, + ComputeInstanceTargetEnvironment, + ConfidentialInstanceConfig, + CustomerEncryptionKey, + DisplayDevice, + Entry, + GuestOsFeature, + InstanceParams, + KeyRevocationActionType, + Metadata, + NetworkInterface, + NetworkPerformanceConfig, + Scheduling, + SchedulingDuration, + ServiceAccount, + Tags, +) __all__ = ( "BackupDRClient", @@ -48,4 +136,82 @@ "OperationMetadata", "WorkforceIdentityBasedManagementURI", "WorkforceIdentityBasedOAuth2ClientID", + "BackupPlan", + "BackupRule", + "BackupWindow", + "CreateBackupPlanRequest", + "DeleteBackupPlanRequest", + "GetBackupPlanRequest", + "ListBackupPlansRequest", + "ListBackupPlansResponse", + "StandardSchedule", + "WeekDayOfMonth", + "BackupPlanAssociation", + "CreateBackupPlanAssociationRequest", + "DeleteBackupPlanAssociationRequest", + "GetBackupPlanAssociationRequest", + "ListBackupPlanAssociationsRequest", + "ListBackupPlanAssociationsResponse", + "RuleConfigInfo", + "TriggerBackupRequest", + "Backup", + "BackupApplianceBackupConfig", + "BackupApplianceLockInfo", + "BackupConfigInfo", + "BackupLock", + "BackupVault", + "CreateBackupVaultRequest", + "DataSource", + "DataSourceBackupApplianceApplication", + "DataSourceGcpResource", + "DeleteBackupRequest", + "DeleteBackupVaultRequest", + "FetchUsableBackupVaultsRequest", + "FetchUsableBackupVaultsResponse", + "GcpBackupConfig", + "GcpResource", + "GetBackupRequest", + "GetBackupVaultRequest", + "GetDataSourceRequest", + "ListBackupsRequest", + "ListBackupsResponse", + "ListBackupVaultsRequest", + "ListBackupVaultsResponse", + "ListDataSourcesRequest", + "ListDataSourcesResponse", + "RestoreBackupRequest", + "RestoreBackupResponse", + "ServiceLockInfo", + "TargetResource", + 
"UpdateBackupRequest", + "UpdateBackupVaultRequest", + "UpdateDataSourceRequest", + "BackupConfigState", + "BackupVaultView", + "BackupView", + "BackupApplianceBackupProperties", + "AcceleratorConfig", + "AccessConfig", + "AdvancedMachineFeatures", + "AliasIpRange", + "AllocationAffinity", + "AttachedDisk", + "ComputeInstanceBackupProperties", + "ComputeInstanceDataSourceProperties", + "ComputeInstanceRestoreProperties", + "ComputeInstanceTargetEnvironment", + "ConfidentialInstanceConfig", + "CustomerEncryptionKey", + "DisplayDevice", + "Entry", + "GuestOsFeature", + "InstanceParams", + "Metadata", + "NetworkInterface", + "NetworkPerformanceConfig", + "Scheduling", + "SchedulingDuration", + "ServiceAccount", + "Tags", + "KeyRevocationActionType", ) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr/gapic_version.py b/packages/google-cloud-backupdr/google/cloud/backupdr/gapic_version.py index 558c8aab67c5..937ede8823ef 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr/gapic_version.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.4" # {x-release-please-version} diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/__init__.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/__init__.py index a2dc2b97f601..eddcfa53658f 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/__init__.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/__init__.py @@ -32,19 +32,183 @@ WorkforceIdentityBasedManagementURI, WorkforceIdentityBasedOAuth2ClientID, ) +from .types.backupplan import ( + BackupPlan, + BackupRule, + BackupWindow, + CreateBackupPlanRequest, + DeleteBackupPlanRequest, + GetBackupPlanRequest, + ListBackupPlansRequest, + ListBackupPlansResponse, + StandardSchedule, + WeekDayOfMonth, +) +from .types.backupplanassociation import ( + BackupPlanAssociation, + CreateBackupPlanAssociationRequest, + DeleteBackupPlanAssociationRequest, + GetBackupPlanAssociationRequest, + ListBackupPlanAssociationsRequest, + ListBackupPlanAssociationsResponse, + RuleConfigInfo, + TriggerBackupRequest, +) +from .types.backupvault import ( + Backup, + BackupApplianceBackupConfig, + BackupApplianceLockInfo, + BackupConfigInfo, + BackupConfigState, + BackupLock, + BackupVault, + BackupVaultView, + BackupView, + CreateBackupVaultRequest, + DataSource, + DataSourceBackupApplianceApplication, + DataSourceGcpResource, + DeleteBackupRequest, + DeleteBackupVaultRequest, + FetchUsableBackupVaultsRequest, + FetchUsableBackupVaultsResponse, + GcpBackupConfig, + GcpResource, + GetBackupRequest, + GetBackupVaultRequest, + GetDataSourceRequest, + ListBackupsRequest, + ListBackupsResponse, + ListBackupVaultsRequest, + ListBackupVaultsResponse, + ListDataSourcesRequest, + ListDataSourcesResponse, + RestoreBackupRequest, + RestoreBackupResponse, + ServiceLockInfo, + TargetResource, + UpdateBackupRequest, + UpdateBackupVaultRequest, + UpdateDataSourceRequest, +) +from .types.backupvault_ba import 
BackupApplianceBackupProperties +from .types.backupvault_gce import ( + AcceleratorConfig, + AccessConfig, + AdvancedMachineFeatures, + AliasIpRange, + AllocationAffinity, + AttachedDisk, + ComputeInstanceBackupProperties, + ComputeInstanceDataSourceProperties, + ComputeInstanceRestoreProperties, + ComputeInstanceTargetEnvironment, + ConfidentialInstanceConfig, + CustomerEncryptionKey, + DisplayDevice, + Entry, + GuestOsFeature, + InstanceParams, + KeyRevocationActionType, + Metadata, + NetworkInterface, + NetworkPerformanceConfig, + Scheduling, + SchedulingDuration, + ServiceAccount, + Tags, +) __all__ = ( "BackupDRAsyncClient", + "AcceleratorConfig", + "AccessConfig", + "AdvancedMachineFeatures", + "AliasIpRange", + "AllocationAffinity", + "AttachedDisk", + "Backup", + "BackupApplianceBackupConfig", + "BackupApplianceBackupProperties", + "BackupApplianceLockInfo", + "BackupConfigInfo", + "BackupConfigState", "BackupDRClient", + "BackupLock", + "BackupPlan", + "BackupPlanAssociation", + "BackupRule", + "BackupVault", + "BackupVaultView", + "BackupView", + "BackupWindow", + "ComputeInstanceBackupProperties", + "ComputeInstanceDataSourceProperties", + "ComputeInstanceRestoreProperties", + "ComputeInstanceTargetEnvironment", + "ConfidentialInstanceConfig", + "CreateBackupPlanAssociationRequest", + "CreateBackupPlanRequest", + "CreateBackupVaultRequest", "CreateManagementServerRequest", + "CustomerEncryptionKey", + "DataSource", + "DataSourceBackupApplianceApplication", + "DataSourceGcpResource", + "DeleteBackupPlanAssociationRequest", + "DeleteBackupPlanRequest", + "DeleteBackupRequest", + "DeleteBackupVaultRequest", "DeleteManagementServerRequest", + "DisplayDevice", + "Entry", + "FetchUsableBackupVaultsRequest", + "FetchUsableBackupVaultsResponse", + "GcpBackupConfig", + "GcpResource", + "GetBackupPlanAssociationRequest", + "GetBackupPlanRequest", + "GetBackupRequest", + "GetBackupVaultRequest", + "GetDataSourceRequest", "GetManagementServerRequest", + 
"GuestOsFeature", + "InstanceParams", + "KeyRevocationActionType", + "ListBackupPlanAssociationsRequest", + "ListBackupPlanAssociationsResponse", + "ListBackupPlansRequest", + "ListBackupPlansResponse", + "ListBackupVaultsRequest", + "ListBackupVaultsResponse", + "ListBackupsRequest", + "ListBackupsResponse", + "ListDataSourcesRequest", + "ListDataSourcesResponse", "ListManagementServersRequest", "ListManagementServersResponse", "ManagementServer", "ManagementURI", + "Metadata", "NetworkConfig", + "NetworkInterface", + "NetworkPerformanceConfig", "OperationMetadata", + "RestoreBackupRequest", + "RestoreBackupResponse", + "RuleConfigInfo", + "Scheduling", + "SchedulingDuration", + "ServiceAccount", + "ServiceLockInfo", + "StandardSchedule", + "Tags", + "TargetResource", + "TriggerBackupRequest", + "UpdateBackupRequest", + "UpdateBackupVaultRequest", + "UpdateDataSourceRequest", + "WeekDayOfMonth", "WorkforceIdentityBasedManagementURI", "WorkforceIdentityBasedOAuth2ClientID", ) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_metadata.json b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_metadata.json index 4a9d58bb8dad..902530688c39 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_metadata.json +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_metadata.json @@ -10,75 +10,420 @@ "grpc": { "libraryClient": "BackupDRClient", "rpcs": { + "CreateBackupPlan": { + "methods": [ + "create_backup_plan" + ] + }, + "CreateBackupPlanAssociation": { + "methods": [ + "create_backup_plan_association" + ] + }, + "CreateBackupVault": { + "methods": [ + "create_backup_vault" + ] + }, "CreateManagementServer": { "methods": [ "create_management_server" ] }, + "DeleteBackup": { + "methods": [ + "delete_backup" + ] + }, + "DeleteBackupPlan": { + "methods": [ + "delete_backup_plan" + ] + }, + "DeleteBackupPlanAssociation": { + "methods": [ + "delete_backup_plan_association" + ] + }, + "DeleteBackupVault": 
{ + "methods": [ + "delete_backup_vault" + ] + }, "DeleteManagementServer": { "methods": [ "delete_management_server" ] }, + "FetchUsableBackupVaults": { + "methods": [ + "fetch_usable_backup_vaults" + ] + }, + "GetBackup": { + "methods": [ + "get_backup" + ] + }, + "GetBackupPlan": { + "methods": [ + "get_backup_plan" + ] + }, + "GetBackupPlanAssociation": { + "methods": [ + "get_backup_plan_association" + ] + }, + "GetBackupVault": { + "methods": [ + "get_backup_vault" + ] + }, + "GetDataSource": { + "methods": [ + "get_data_source" + ] + }, "GetManagementServer": { "methods": [ "get_management_server" ] }, + "ListBackupPlanAssociations": { + "methods": [ + "list_backup_plan_associations" + ] + }, + "ListBackupPlans": { + "methods": [ + "list_backup_plans" + ] + }, + "ListBackupVaults": { + "methods": [ + "list_backup_vaults" + ] + }, + "ListBackups": { + "methods": [ + "list_backups" + ] + }, + "ListDataSources": { + "methods": [ + "list_data_sources" + ] + }, "ListManagementServers": { "methods": [ "list_management_servers" ] + }, + "RestoreBackup": { + "methods": [ + "restore_backup" + ] + }, + "TriggerBackup": { + "methods": [ + "trigger_backup" + ] + }, + "UpdateBackup": { + "methods": [ + "update_backup" + ] + }, + "UpdateBackupVault": { + "methods": [ + "update_backup_vault" + ] + }, + "UpdateDataSource": { + "methods": [ + "update_data_source" + ] } } }, "grpc-async": { "libraryClient": "BackupDRAsyncClient", "rpcs": { + "CreateBackupPlan": { + "methods": [ + "create_backup_plan" + ] + }, + "CreateBackupPlanAssociation": { + "methods": [ + "create_backup_plan_association" + ] + }, + "CreateBackupVault": { + "methods": [ + "create_backup_vault" + ] + }, "CreateManagementServer": { "methods": [ "create_management_server" ] }, + "DeleteBackup": { + "methods": [ + "delete_backup" + ] + }, + "DeleteBackupPlan": { + "methods": [ + "delete_backup_plan" + ] + }, + "DeleteBackupPlanAssociation": { + "methods": [ + "delete_backup_plan_association" + ] + }, + 
"DeleteBackupVault": { + "methods": [ + "delete_backup_vault" + ] + }, "DeleteManagementServer": { "methods": [ "delete_management_server" ] }, + "FetchUsableBackupVaults": { + "methods": [ + "fetch_usable_backup_vaults" + ] + }, + "GetBackup": { + "methods": [ + "get_backup" + ] + }, + "GetBackupPlan": { + "methods": [ + "get_backup_plan" + ] + }, + "GetBackupPlanAssociation": { + "methods": [ + "get_backup_plan_association" + ] + }, + "GetBackupVault": { + "methods": [ + "get_backup_vault" + ] + }, + "GetDataSource": { + "methods": [ + "get_data_source" + ] + }, "GetManagementServer": { "methods": [ "get_management_server" ] }, + "ListBackupPlanAssociations": { + "methods": [ + "list_backup_plan_associations" + ] + }, + "ListBackupPlans": { + "methods": [ + "list_backup_plans" + ] + }, + "ListBackupVaults": { + "methods": [ + "list_backup_vaults" + ] + }, + "ListBackups": { + "methods": [ + "list_backups" + ] + }, + "ListDataSources": { + "methods": [ + "list_data_sources" + ] + }, "ListManagementServers": { "methods": [ "list_management_servers" ] + }, + "RestoreBackup": { + "methods": [ + "restore_backup" + ] + }, + "TriggerBackup": { + "methods": [ + "trigger_backup" + ] + }, + "UpdateBackup": { + "methods": [ + "update_backup" + ] + }, + "UpdateBackupVault": { + "methods": [ + "update_backup_vault" + ] + }, + "UpdateDataSource": { + "methods": [ + "update_data_source" + ] } } }, "rest": { "libraryClient": "BackupDRClient", "rpcs": { + "CreateBackupPlan": { + "methods": [ + "create_backup_plan" + ] + }, + "CreateBackupPlanAssociation": { + "methods": [ + "create_backup_plan_association" + ] + }, + "CreateBackupVault": { + "methods": [ + "create_backup_vault" + ] + }, "CreateManagementServer": { "methods": [ "create_management_server" ] }, + "DeleteBackup": { + "methods": [ + "delete_backup" + ] + }, + "DeleteBackupPlan": { + "methods": [ + "delete_backup_plan" + ] + }, + "DeleteBackupPlanAssociation": { + "methods": [ + "delete_backup_plan_association" + ] + 
}, + "DeleteBackupVault": { + "methods": [ + "delete_backup_vault" + ] + }, "DeleteManagementServer": { "methods": [ "delete_management_server" ] }, + "FetchUsableBackupVaults": { + "methods": [ + "fetch_usable_backup_vaults" + ] + }, + "GetBackup": { + "methods": [ + "get_backup" + ] + }, + "GetBackupPlan": { + "methods": [ + "get_backup_plan" + ] + }, + "GetBackupPlanAssociation": { + "methods": [ + "get_backup_plan_association" + ] + }, + "GetBackupVault": { + "methods": [ + "get_backup_vault" + ] + }, + "GetDataSource": { + "methods": [ + "get_data_source" + ] + }, "GetManagementServer": { "methods": [ "get_management_server" ] }, + "ListBackupPlanAssociations": { + "methods": [ + "list_backup_plan_associations" + ] + }, + "ListBackupPlans": { + "methods": [ + "list_backup_plans" + ] + }, + "ListBackupVaults": { + "methods": [ + "list_backup_vaults" + ] + }, + "ListBackups": { + "methods": [ + "list_backups" + ] + }, + "ListDataSources": { + "methods": [ + "list_data_sources" + ] + }, "ListManagementServers": { "methods": [ "list_management_servers" ] + }, + "RestoreBackup": { + "methods": [ + "restore_backup" + ] + }, + "TriggerBackup": { + "methods": [ + "trigger_backup" + ] + }, + "UpdateBackup": { + "methods": [ + "update_backup" + ] + }, + "UpdateBackupVault": { + "methods": [ + "update_backup_vault" + ] + }, + "UpdateDataSource": { + "methods": [ + "update_data_source" + ] } } } diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_version.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_version.py index 558c8aab67c5..937ede8823ef 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_version.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.4" # {x-release-please-version} diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/async_client.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/async_client.py index 33ea84a57365..ae020d8602f6 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/async_client.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/async_client.py @@ -48,12 +48,21 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.protobuf import wrappers_pb2 # type: ignore from google.cloud.backupdr_v1.services.backup_dr import pagers -from google.cloud.backupdr_v1.types import backupdr +from google.cloud.backupdr_v1.types import ( + backupdr, + backupplan, + backupplanassociation, + backupvault, + backupvault_ba, + backupvault_gce, +) from .client import BackupDRClient from .transports.base import DEFAULT_CLIENT_INFO, BackupDRTransport @@ -72,6 +81,20 @@ class BackupDRAsyncClient: _DEFAULT_ENDPOINT_TEMPLATE = BackupDRClient._DEFAULT_ENDPOINT_TEMPLATE _DEFAULT_UNIVERSE = BackupDRClient._DEFAULT_UNIVERSE + backup_path = staticmethod(BackupDRClient.backup_path) + parse_backup_path = staticmethod(BackupDRClient.parse_backup_path) + backup_plan_path = staticmethod(BackupDRClient.backup_plan_path) + parse_backup_plan_path = staticmethod(BackupDRClient.parse_backup_plan_path) + backup_plan_association_path = staticmethod( + BackupDRClient.backup_plan_association_path + ) + parse_backup_plan_association_path = staticmethod( + 
BackupDRClient.parse_backup_plan_association_path + ) + backup_vault_path = staticmethod(BackupDRClient.backup_vault_path) + parse_backup_vault_path = staticmethod(BackupDRClient.parse_backup_vault_path) + data_source_path = staticmethod(BackupDRClient.data_source_path) + parse_data_source_path = staticmethod(BackupDRClient.parse_data_source_path) management_server_path = staticmethod(BackupDRClient.management_server_path) parse_management_server_path = staticmethod( BackupDRClient.parse_management_server_path @@ -304,10 +327,10 @@ async def sample_list_management_servers(): parent (:class:`str`): Required. The project and location for which to retrieve management servers information, in the format - ``projects/{project_id}/locations/{location}``. In Cloud - BackupDR, locations map to GCP regions, for example - **us-central1**. To retrieve management servers for all - locations, use "-" for the ``{location}`` value. + 'projects/{project_id}/locations/{location}'. In Cloud + BackupDR, locations map to Google Cloud regions, for + example **us-central1**. To retrieve management servers + for all locations, use "-" for the '{location}' value. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -428,7 +451,7 @@ async def sample_get_management_server(): name (:class:`str`): Required. Name of the management server resource name, in the format - ``projects/{project_id}/locations/{location}/managementServers/{resource_name}`` + 'projects/{project_id}/locations/{location}/managementServers/{resource_name}' This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -542,10 +565,9 @@ async def sample_create_management_server(): management server instance. parent (:class:`str`): Required. The management server project and location in - the format - ``projects/{project_id}/locations/{location}``. In Cloud - Backup and DR locations map to GCP regions, for example - **us-central1**. 
+ the format 'projects/{project_id}/locations/{location}'. + In Cloud Backup and DR locations map to Google Cloud + regions, for example **us-central1**. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -766,6 +788,2928 @@ async def sample_delete_management_server(): # Done; return the response. return response + async def create_backup_vault( + self, + request: Optional[Union[backupvault.CreateBackupVaultRequest, dict]] = None, + *, + parent: Optional[str] = None, + backup_vault: Optional[backupvault.BackupVault] = None, + backup_vault_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new BackupVault in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_create_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.CreateBackupVaultRequest( + parent="parent_value", + backup_vault_id="backup_vault_id_value", + ) + + # Make the request + operation = client.create_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.CreateBackupVaultRequest, dict]]): + The request object. Message for creating a BackupVault. 
+ parent (:class:`str`): + Required. Value for parent. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_vault (:class:`google.cloud.backupdr_v1.types.BackupVault`): + Required. The resource being created + This corresponds to the ``backup_vault`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_vault_id (:class:`str`): + Required. ID of the requesting object If auto-generating + ID server-side, remove this field and backup_vault_id + from the method_signature of Create RPC + + This corresponds to the ``backup_vault_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.backupdr_v1.types.BackupVault` + Message describing a BackupVault object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, backup_vault, backup_vault_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, backupvault.CreateBackupVaultRequest): + request = backupvault.CreateBackupVaultRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup_vault is not None: + request.backup_vault = backup_vault + if backup_vault_id is not None: + request.backup_vault_id = backup_vault_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_backup_vault + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + backupvault.BackupVault, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_backup_vaults( + self, + request: Optional[Union[backupvault.ListBackupVaultsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupVaultsAsyncPager: + r"""Lists BackupVaults in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_list_backup_vaults(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupVaultsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_vaults(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.ListBackupVaultsRequest, dict]]): + The request object. Request message for listing + backupvault stores. + parent (:class:`str`): + Required. The project and location for which to retrieve + backupvault stores information, in the format + 'projects/{project_id}/locations/{location}'. In Cloud + Backup and DR, locations map to Google Cloud regions, + for example **us-central1**. To retrieve backupvault + stores for all locations, use "-" for the '{location}' + value. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupVaultsAsyncPager: + Response message for listing + BackupVaults. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.ListBackupVaultsRequest): + request = backupvault.ListBackupVaultsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_backup_vaults + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListBackupVaultsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def fetch_usable_backup_vaults( + self, + request: Optional[ + Union[backupvault.FetchUsableBackupVaultsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.FetchUsableBackupVaultsAsyncPager: + r"""FetchUsableBackupVaults lists usable BackupVaults in + a given project and location. Usable BackupVault are the + ones that user has backupdr.backupVaults.get permission. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_fetch_usable_backup_vaults(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.FetchUsableBackupVaultsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.fetch_usable_backup_vaults(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.FetchUsableBackupVaultsRequest, dict]]): + The request object. Request message for fetching usable + BackupVaults. + parent (:class:`str`): + Required. The project and location for which to retrieve + backupvault stores information, in the format + 'projects/{project_id}/locations/{location}'. In Cloud + Backup and DR, locations map to Google Cloud regions, + for example **us-central1**. To retrieve backupvault + stores for all locations, use "-" for the '{location}' + value. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.FetchUsableBackupVaultsAsyncPager: + Response message for fetching usable + BackupVaults. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.FetchUsableBackupVaultsRequest): + request = backupvault.FetchUsableBackupVaultsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.fetch_usable_backup_vaults + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.FetchUsableBackupVaultsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_backup_vault( + self, + request: Optional[Union[backupvault.GetBackupVaultRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.BackupVault: + r"""Gets details of a BackupVault. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_get_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupVaultRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup_vault(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.GetBackupVaultRequest, dict]]): + The request object. Request message for getting a + BackupVault. + name (:class:`str`): + Required. 
Name of the backupvault store resource name, + in the format + 'projects/{project_id}/locations/{location}/backupVaults/{resource_name}' + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.types.BackupVault: + Message describing a BackupVault + object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.GetBackupVaultRequest): + request = backupvault.GetBackupVaultRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_backup_vault + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_backup_vault( + self, + request: Optional[Union[backupvault.UpdateBackupVaultRequest, dict]] = None, + *, + backup_vault: Optional[backupvault.BackupVault] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the settings of a BackupVault. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_update_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.UpdateBackupVaultRequest( + ) + + # Make the request + operation = client.update_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.UpdateBackupVaultRequest, dict]]): + The request object. Request message for updating a + BackupVault. + backup_vault (:class:`google.cloud.backupdr_v1.types.BackupVault`): + Required. The resource being updated + This corresponds to the ``backup_vault`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. 
Field mask is used to specify the fields to be + overwritten in the BackupVault resource by the update. + The fields specified in the update_mask are relative to + the resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then the request will fail. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.backupdr_v1.types.BackupVault` + Message describing a BackupVault object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([backup_vault, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.UpdateBackupVaultRequest): + request = backupvault.UpdateBackupVaultRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if backup_vault is not None: + request.backup_vault = backup_vault + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_backup_vault + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("backup_vault.name", request.backup_vault.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + backupvault.BackupVault, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_backup_vault( + self, + request: Optional[Union[backupvault.DeleteBackupVaultRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a BackupVault. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_delete_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupVaultRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.DeleteBackupVaultRequest, dict]]): + The request object. Message for deleting a BackupVault. + name (:class:`str`): + Required. Name of the resource. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.DeleteBackupVaultRequest): + request = backupvault.DeleteBackupVaultRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_backup_vault + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_data_sources( + self, + request: Optional[Union[backupvault.ListDataSourcesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDataSourcesAsyncPager: + r"""Lists DataSources in a given project and location. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_list_data_sources(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.ListDataSourcesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_sources(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.ListDataSourcesRequest, dict]]): + The request object. Request message for listing + DataSources. + parent (:class:`str`): + Required. The project and location for which to retrieve + data sources information, in the format + 'projects/{project_id}/locations/{location}'. In Cloud + Backup and DR, locations map to Google Cloud regions, + for example **us-central1**. To retrieve data sources + for all locations, use "-" for the '{location}' value. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.ListDataSourcesAsyncPager: + Response message for listing + DataSources. + Iterating over this object will yield + results and resolve additional pages + automatically. 
+ + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.ListDataSourcesRequest): + request = backupvault.ListDataSourcesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_data_sources + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDataSourcesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_data_source( + self, + request: Optional[Union[backupvault.GetDataSourceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.DataSource: + r"""Gets details of a DataSource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_get_data_source(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.GetDataSourceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_data_source(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.GetDataSourceRequest, dict]]): + The request object. Request message for getting a + DataSource instance. + name (:class:`str`): + Required. Name of the data source resource name, in the + format + 'projects/{project_id}/locations/{location}/backupVaults/{resource_name}/dataSource/{resource_name}' + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.backupdr_v1.types.DataSource: + Message describing a DataSource + object. Datasource object used to + represent Datasource details for both + admin and basic view. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.GetDataSourceRequest): + request = backupvault.GetDataSourceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_data_source + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def update_data_source( + self, + request: Optional[Union[backupvault.UpdateDataSourceRequest, dict]] = None, + *, + data_source: Optional[backupvault.DataSource] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the settings of a DataSource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_update_data_source(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.UpdateDataSourceRequest( + ) + + # Make the request + operation = client.update_data_source(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.UpdateDataSourceRequest, dict]]): + The request object. Request message for updating a data + source instance. + data_source (:class:`google.cloud.backupdr_v1.types.DataSource`): + Required. The resource being updated + This corresponds to the ``data_source`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Field mask is used to specify the fields to be + overwritten in the DataSource resource by the update. 
+ The fields specified in the update_mask are relative to + the resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then the request will fail. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.backupdr_v1.types.DataSource` Message describing a DataSource object. + Datasource object used to represent Datasource + details for both admin and basic view. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([data_source, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.UpdateDataSourceRequest): + request = backupvault.UpdateDataSourceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if data_source is not None: + request.data_source = data_source + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_data_source + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("data_source.name", request.data_source.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + backupvault.DataSource, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_backups( + self, + request: Optional[Union[backupvault.ListBackupsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupsAsyncPager: + r"""Lists Backups in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_list_backups(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backups(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.ListBackupsRequest, dict]]): + The request object. Request message for listing Backups. + parent (:class:`str`): + Required. The project and location for which to retrieve + backup information, in the format + 'projects/{project_id}/locations/{location}'. In Cloud + Backup and DR, locations map to Google Cloud regions, + for example **us-central1**. To retrieve data sources + for all locations, use "-" for the '{location}' value. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupsAsyncPager: + Response message for listing Backups. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.ListBackupsRequest): + request = backupvault.ListBackupsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_backups + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListBackupsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_backup( + self, + request: Optional[Union[backupvault.GetBackupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.Backup: + r"""Gets details of a Backup. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_get_backup(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.GetBackupRequest, dict]]): + The request object. Request message for getting a Backup. + name (:class:`str`): + Required. Name of the data source resource name, in the + format + 'projects/{project_id}/locations/{location}/backupVaults/{backupVault}/dataSources/{datasource}/backups/{backup}' + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.types.Backup: + Message describing a Backup object. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.GetBackupRequest): + request = backupvault.GetBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_backup + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_backup( + self, + request: Optional[Union[backupvault.UpdateBackupRequest, dict]] = None, + *, + backup: Optional[backupvault.Backup] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the settings of a Backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_update_backup(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.UpdateBackupRequest( + ) + + # Make the request + operation = client.update_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.UpdateBackupRequest, dict]]): + The request object. Request message for updating a + Backup. + backup (:class:`google.cloud.backupdr_v1.types.Backup`): + Required. The resource being updated + This corresponds to the ``backup`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Field mask is used to specify the fields to be + overwritten in the Backup resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then the request will fail. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.backupdr_v1.types.Backup` Message + describing a Backup object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([backup, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.UpdateBackupRequest): + request = backupvault.UpdateBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if backup is not None: + request.backup = backup + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_backup + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("backup.name", request.backup.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + backupvault.Backup, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def delete_backup( + self, + request: Optional[Union[backupvault.DeleteBackupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a Backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_delete_backup(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.DeleteBackupRequest, dict]]): + The request object. Message for deleting a Backup. + name (:class:`str`): + Required. Name of the resource. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.backupdr_v1.types.Backup` Message + describing a Backup object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.DeleteBackupRequest): + request = backupvault.DeleteBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_backup + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + backupvault.Backup, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def restore_backup( + self, + request: Optional[Union[backupvault.RestoreBackupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Restore from a Backup + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_restore_backup(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + compute_instance_target_environment = backupdr_v1.ComputeInstanceTargetEnvironment() + compute_instance_target_environment.project = "project_value" + compute_instance_target_environment.zone = "zone_value" + + request = backupdr_v1.RestoreBackupRequest( + compute_instance_target_environment=compute_instance_target_environment, + name="name_value", + ) + + # Make the request + operation = client.restore_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.RestoreBackupRequest, dict]]): + The request object. Request message for restoring from a + Backup. + name (:class:`str`): + Required. The resource name of the Backup instance, in + the format + 'projects/*/locations/*/backupVaults/*/dataSources/*/backups/'. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.backupdr_v1.types.RestoreBackupResponse` + Response message for restoring from a Backup. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.RestoreBackupRequest): + request = backupvault.RestoreBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.restore_backup + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + backupvault.RestoreBackupResponse, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + async def create_backup_plan( + self, + request: Optional[Union[backupplan.CreateBackupPlanRequest, dict]] = None, + *, + parent: Optional[str] = None, + backup_plan: Optional[backupplan.BackupPlan] = None, + backup_plan_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Create a BackupPlan + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_create_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + backup_plan = backupdr_v1.BackupPlan() + backup_plan.backup_rules.standard_schedule.recurrence_type = "YEARLY" + backup_plan.backup_rules.standard_schedule.backup_window.start_hour_of_day = 1820 + backup_plan.backup_rules.standard_schedule.backup_window.end_hour_of_day = 1573 + backup_plan.backup_rules.standard_schedule.time_zone = "time_zone_value" + backup_plan.backup_rules.rule_id = "rule_id_value" + backup_plan.backup_rules.backup_retention_days = 2237 + backup_plan.resource_type = "resource_type_value" + backup_plan.backup_vault = "backup_vault_value" + + request = backupdr_v1.CreateBackupPlanRequest( + parent="parent_value", + backup_plan_id="backup_plan_id_value", + backup_plan=backup_plan, + ) + + # Make the request + operation = client.create_backup_plan(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.CreateBackupPlanRequest, dict]]): + The request object. The request message for creating a ``BackupPlan``. + parent (:class:`str`): + Required. The ``BackupPlan`` project and location in the + format ``projects/{project}/locations/{location}``. In + Cloud BackupDR locations map to GCP regions, for example + **us-central1**. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_plan (:class:`google.cloud.backupdr_v1.types.BackupPlan`): + Required. The ``BackupPlan`` resource object to create. 
+ This corresponds to the ``backup_plan`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_plan_id (:class:`str`): + Required. The name of the ``BackupPlan`` to create. The + name must be unique for the specified project and + location.The name must start with a lowercase letter + followed by up to 62 lowercase letters, numbers, or + hyphens. Pattern, /[a-z][a-z0-9-]{,62}/. + + This corresponds to the ``backup_plan_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.backupdr_v1.types.BackupPlan` A BackupPlan specifies some common fields, such as description as well + as one or more BackupRule messages. Each BackupRule + has a retention policy and defines a schedule by + which the system is to perform backup workloads. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, backup_plan, backup_plan_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, backupplan.CreateBackupPlanRequest): + request = backupplan.CreateBackupPlanRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup_plan is not None: + request.backup_plan = backup_plan + if backup_plan_id is not None: + request.backup_plan_id = backup_plan_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_backup_plan + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + backupplan.BackupPlan, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + async def get_backup_plan( + self, + request: Optional[Union[backupplan.GetBackupPlanRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupplan.BackupPlan: + r"""Gets details of a single BackupPlan. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_get_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupPlanRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup_plan(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.GetBackupPlanRequest, dict]]): + The request object. The request message for getting a ``BackupPlan``. + name (:class:`str`): + Required. The resource name of the ``BackupPlan`` to + retrieve. + + Format: + ``projects/{project}/locations/{location}/backupPlans/{backup_plan}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.types.BackupPlan: + A BackupPlan specifies some common fields, such as description as well + as one or more BackupRule messages. Each BackupRule + has a retention policy and defines a schedule by + which the system is to perform backup workloads. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupplan.GetBackupPlanRequest): + request = backupplan.GetBackupPlanRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_backup_plan + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_backup_plans( + self, + request: Optional[Union[backupplan.ListBackupPlansRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupPlansAsyncPager: + r"""Lists BackupPlans in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_list_backup_plans(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupPlansRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_plans(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.ListBackupPlansRequest, dict]]): + The request object. The request message for getting a list ``BackupPlan``. + parent (:class:`str`): + Required. The project and location for which to retrieve + ``BackupPlans`` information. Format: + ``projects/{project}/locations/{location}``. In Cloud + BackupDR, locations map to GCP regions, for e.g. + **us-central1**. To retrieve backup plans for all + locations, use "-" for the ``{location}`` value. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlansAsyncPager: + The response message for getting a list of BackupPlan. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupplan.ListBackupPlansRequest): + request = backupplan.ListBackupPlansRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_backup_plans + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListBackupPlansAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_backup_plan( + self, + request: Optional[Union[backupplan.DeleteBackupPlanRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single BackupPlan. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_delete_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupPlanRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_plan(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.DeleteBackupPlanRequest, dict]]): + The request object. The request message for deleting a ``BackupPlan``. + name (:class:`str`): + Required. The resource name of the ``BackupPlan`` to + delete. + + Format: + ``projects/{project}/locations/{location}/backupPlans/{backup_plan}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. 
A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupplan.DeleteBackupPlanRequest): + request = backupplan.DeleteBackupPlanRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_backup_plan + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def create_backup_plan_association( + self, + request: Optional[ + Union[backupplanassociation.CreateBackupPlanAssociationRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + backup_plan_association: Optional[ + backupplanassociation.BackupPlanAssociation + ] = None, + backup_plan_association_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Create a BackupPlanAssociation + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_create_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + backup_plan_association = backupdr_v1.BackupPlanAssociation() + backup_plan_association.resource = "resource_value" + backup_plan_association.backup_plan = "backup_plan_value" + + request = backupdr_v1.CreateBackupPlanAssociationRequest( + parent="parent_value", + backup_plan_association_id="backup_plan_association_id_value", + backup_plan_association=backup_plan_association, + ) + + # Make the request + operation = client.create_backup_plan_association(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.CreateBackupPlanAssociationRequest, dict]]): + The request object. 
Request message for creating a backup + plan. + parent (:class:`str`): + Required. The backup plan association project and + location in the format + ``projects/{project_id}/locations/{location}``. In Cloud + BackupDR locations map to GCP regions, for example + **us-central1**. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_plan_association (:class:`google.cloud.backupdr_v1.types.BackupPlanAssociation`): + Required. The resource being created + This corresponds to the ``backup_plan_association`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_plan_association_id (:class:`str`): + Required. The name of the backup plan + association to create. The name must be + unique for the specified project and + location. + + This corresponds to the ``backup_plan_association_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.backupdr_v1.types.BackupPlanAssociation` A BackupPlanAssociation represents a single BackupPlanAssociation which + contains details like workload, backup plan etc + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any( + [parent, backup_plan_association, backup_plan_association_id] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, backupplanassociation.CreateBackupPlanAssociationRequest + ): + request = backupplanassociation.CreateBackupPlanAssociationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup_plan_association is not None: + request.backup_plan_association = backup_plan_association + if backup_plan_association_id is not None: + request.backup_plan_association_id = backup_plan_association_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_backup_plan_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + backupplanassociation.BackupPlanAssociation, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def get_backup_plan_association( + self, + request: Optional[ + Union[backupplanassociation.GetBackupPlanAssociationRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupplanassociation.BackupPlanAssociation: + r"""Gets details of a single BackupPlanAssociation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_get_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupPlanAssociationRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup_plan_association(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.GetBackupPlanAssociationRequest, dict]]): + The request object. Request message for getting a + BackupPlanAssociation resource. + name (:class:`str`): + Required. Name of the backup plan association resource, + in the format + ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.types.BackupPlanAssociation: + A BackupPlanAssociation represents a + single BackupPlanAssociation which + contains details like workload, backup + plan etc + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, backupplanassociation.GetBackupPlanAssociationRequest + ): + request = backupplanassociation.GetBackupPlanAssociationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_backup_plan_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_backup_plan_associations( + self, + request: Optional[ + Union[backupplanassociation.ListBackupPlanAssociationsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupPlanAssociationsAsyncPager: + r"""Lists BackupPlanAssociations in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_list_backup_plan_associations(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupPlanAssociationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_plan_associations(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.ListBackupPlanAssociationsRequest, dict]]): + The request object. Request message for List + BackupPlanAssociation + parent (:class:`str`): + Required. The project and location for which to retrieve + backup Plan Associations information, in the format + ``projects/{project_id}/locations/{location}``. In Cloud + BackupDR, locations map to GCP regions, for example + **us-central1**. To retrieve backup plan associations + for all locations, use "-" for the ``{location}`` value. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlanAssociationsAsyncPager: + Response message for List + BackupPlanAssociation + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, backupplanassociation.ListBackupPlanAssociationsRequest + ): + request = backupplanassociation.ListBackupPlanAssociationsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_backup_plan_associations + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. 
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListBackupPlanAssociationsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_backup_plan_association( + self, + request: Optional[ + Union[backupplanassociation.DeleteBackupPlanAssociationRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single BackupPlanAssociation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_delete_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupPlanAssociationRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_plan_association(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.DeleteBackupPlanAssociationRequest, dict]]): + The request object. 
Request message for deleting a backup + plan association. + name (:class:`str`): + Required. Name of the backup plan association resource, + in the format + ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, backupplanassociation.DeleteBackupPlanAssociationRequest + ): + request = backupplanassociation.DeleteBackupPlanAssociationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_backup_plan_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + async def trigger_backup( + self, + request: Optional[ + Union[backupplanassociation.TriggerBackupRequest, dict] + ] = None, + *, + name: Optional[str] = None, + rule_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Triggers a new Backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_trigger_backup(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.TriggerBackupRequest( + name="name_value", + rule_id="rule_id_value", + ) + + # Make the request + operation = client.trigger_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.TriggerBackupRequest, dict]]): + The request object. Request message for triggering a + backup. + name (:class:`str`): + Required. Name of the backup plan association resource, + in the format + ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + rule_id (:class:`str`): + Required. backup rule_id for which a backup needs to be + triggered. + + This corresponds to the ``rule_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.cloud.backupdr_v1.types.BackupPlanAssociation` A BackupPlanAssociation represents a single BackupPlanAssociation which + contains details like workload, backup plan etc + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, rule_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupplanassociation.TriggerBackupRequest): + request = backupplanassociation.TriggerBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if rule_id is not None: + request.rule_id = rule_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.trigger_backup + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + backupplanassociation.BackupPlanAssociation, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. 
+ return response + async def list_operations( self, request: Optional[operations_pb2.ListOperationsRequest] = None, diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/client.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/client.py index a853cfead99d..b884fc527a39 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/client.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/client.py @@ -54,12 +54,21 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.protobuf import wrappers_pb2 # type: ignore from google.cloud.backupdr_v1.services.backup_dr import pagers -from google.cloud.backupdr_v1.types import backupdr +from google.cloud.backupdr_v1.types import ( + backupdr, + backupplan, + backupplanassociation, + backupvault, + backupvault_ba, + backupvault_gce, +) from .transports.base import DEFAULT_CLIENT_INFO, BackupDRTransport from .transports.grpc import BackupDRGrpcTransport @@ -191,6 +200,126 @@ def transport(self) -> BackupDRTransport: """ return self._transport + @staticmethod + def backup_path( + project: str, + location: str, + backupvault: str, + datasource: str, + backup: str, + ) -> str: + """Returns a fully-qualified backup string.""" + return "projects/{project}/locations/{location}/backupVaults/{backupvault}/dataSources/{datasource}/backups/{backup}".format( + project=project, + location=location, + backupvault=backupvault, + datasource=datasource, + backup=backup, + ) + + @staticmethod + def parse_backup_path(path: str) -> Dict[str, str]: + """Parses a backup path into 
its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/backupVaults/(?P.+?)/dataSources/(?P.+?)/backups/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def backup_plan_path( + project: str, + location: str, + backup_plan: str, + ) -> str: + """Returns a fully-qualified backup_plan string.""" + return ( + "projects/{project}/locations/{location}/backupPlans/{backup_plan}".format( + project=project, + location=location, + backup_plan=backup_plan, + ) + ) + + @staticmethod + def parse_backup_plan_path(path: str) -> Dict[str, str]: + """Parses a backup_plan path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/backupPlans/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def backup_plan_association_path( + project: str, + location: str, + backup_plan_association: str, + ) -> str: + """Returns a fully-qualified backup_plan_association string.""" + return "projects/{project}/locations/{location}/backupPlanAssociations/{backup_plan_association}".format( + project=project, + location=location, + backup_plan_association=backup_plan_association, + ) + + @staticmethod + def parse_backup_plan_association_path(path: str) -> Dict[str, str]: + """Parses a backup_plan_association path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/backupPlanAssociations/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def backup_vault_path( + project: str, + location: str, + backupvault: str, + ) -> str: + """Returns a fully-qualified backup_vault string.""" + return ( + "projects/{project}/locations/{location}/backupVaults/{backupvault}".format( + project=project, + location=location, + backupvault=backupvault, + ) + ) + + @staticmethod + def parse_backup_vault_path(path: str) -> Dict[str, str]: + """Parses a backup_vault path into its component segments.""" + m = re.match( + 
r"^projects/(?P.+?)/locations/(?P.+?)/backupVaults/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def data_source_path( + project: str, + location: str, + backupvault: str, + datasource: str, + ) -> str: + """Returns a fully-qualified data_source string.""" + return "projects/{project}/locations/{location}/backupVaults/{backupvault}/dataSources/{datasource}".format( + project=project, + location=location, + backupvault=backupvault, + datasource=datasource, + ) + + @staticmethod + def parse_data_source_path(path: str) -> Dict[str, str]: + """Parses a data_source path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/backupVaults/(?P.+?)/dataSources/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def management_server_path( project: str, @@ -728,10 +857,10 @@ def sample_list_management_servers(): parent (str): Required. The project and location for which to retrieve management servers information, in the format - ``projects/{project_id}/locations/{location}``. In Cloud - BackupDR, locations map to GCP regions, for example - **us-central1**. To retrieve management servers for all - locations, use "-" for the ``{location}`` value. + 'projects/{project_id}/locations/{location}'. In Cloud + BackupDR, locations map to Google Cloud regions, for + example **us-central1**. To retrieve management servers + for all locations, use "-" for the '{location}' value. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -849,7 +978,7 @@ def sample_get_management_server(): name (str): Required. 
Name of the management server resource name, in the format - ``projects/{project_id}/locations/{location}/managementServers/{resource_name}`` + 'projects/{project_id}/locations/{location}/managementServers/{resource_name}' This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -960,10 +1089,9 @@ def sample_create_management_server(): management server instance. parent (str): Required. The management server project and location in - the format - ``projects/{project_id}/locations/{location}``. In Cloud - Backup and DR locations map to GCP regions, for example - **us-central1**. + the format 'projects/{project_id}/locations/{location}'. + In Cloud Backup and DR locations map to Google Cloud + regions, for example **us-central1**. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1178,6 +1306,2869 @@ def sample_delete_management_server(): # Done; return the response. return response + def create_backup_vault( + self, + request: Optional[Union[backupvault.CreateBackupVaultRequest, dict]] = None, + *, + parent: Optional[str] = None, + backup_vault: Optional[backupvault.BackupVault] = None, + backup_vault_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new BackupVault in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_create_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.CreateBackupVaultRequest( + parent="parent_value", + backup_vault_id="backup_vault_id_value", + ) + + # Make the request + operation = client.create_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.CreateBackupVaultRequest, dict]): + The request object. Message for creating a BackupVault. + parent (str): + Required. Value for parent. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_vault (google.cloud.backupdr_v1.types.BackupVault): + Required. The resource being created + This corresponds to the ``backup_vault`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_vault_id (str): + Required. ID of the requesting object If auto-generating + ID server-side, remove this field and backup_vault_id + from the method_signature of Create RPC + + This corresponds to the ``backup_vault_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.backupdr_v1.types.BackupVault` + Message describing a BackupVault object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, backup_vault, backup_vault_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.CreateBackupVaultRequest): + request = backupvault.CreateBackupVaultRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup_vault is not None: + request.backup_vault = backup_vault + if backup_vault_id is not None: + request.backup_vault_id = backup_vault_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_backup_vault] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + backupvault.BackupVault, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def list_backup_vaults( + self, + request: Optional[Union[backupvault.ListBackupVaultsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupVaultsPager: + r"""Lists BackupVaults in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_list_backup_vaults(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupVaultsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_vaults(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.ListBackupVaultsRequest, dict]): + The request object. Request message for listing + backupvault stores. + parent (str): + Required. The project and location for which to retrieve + backupvault stores information, in the format + 'projects/{project_id}/locations/{location}'. In Cloud + Backup and DR, locations map to Google Cloud regions, + for example **us-central1**. To retrieve backupvault + stores for all locations, use "-" for the '{location}' + value. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupVaultsPager: + Response message for listing + BackupVaults. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.ListBackupVaultsRequest): + request = backupvault.ListBackupVaultsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_backup_vaults] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListBackupVaultsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def fetch_usable_backup_vaults( + self, + request: Optional[ + Union[backupvault.FetchUsableBackupVaultsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.FetchUsableBackupVaultsPager: + r"""FetchUsableBackupVaults lists usable BackupVaults in + a given project and location. Usable BackupVault are the + ones that user has backupdr.backupVaults.get permission. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_fetch_usable_backup_vaults(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.FetchUsableBackupVaultsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.fetch_usable_backup_vaults(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.FetchUsableBackupVaultsRequest, dict]): + The request object. Request message for fetching usable + BackupVaults. + parent (str): + Required. The project and location for which to retrieve + backupvault stores information, in the format + 'projects/{project_id}/locations/{location}'. 
In Cloud + Backup and DR, locations map to Google Cloud regions, + for example **us-central1**. To retrieve backupvault + stores for all locations, use "-" for the '{location}' + value. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.FetchUsableBackupVaultsPager: + Response message for fetching usable + BackupVaults. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.FetchUsableBackupVaultsRequest): + request = backupvault.FetchUsableBackupVaultsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.fetch_usable_backup_vaults + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.FetchUsableBackupVaultsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_backup_vault( + self, + request: Optional[Union[backupvault.GetBackupVaultRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.BackupVault: + r"""Gets details of a BackupVault. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_get_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupVaultRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup_vault(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.GetBackupVaultRequest, dict]): + The request object. Request message for getting a + BackupVault. + name (str): + Required. 
Name of the backupvault store resource name, + in the format + 'projects/{project_id}/locations/{location}/backupVaults/{resource_name}' + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.types.BackupVault: + Message describing a BackupVault + object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.GetBackupVaultRequest): + request = backupvault.GetBackupVaultRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_backup_vault] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def update_backup_vault( + self, + request: Optional[Union[backupvault.UpdateBackupVaultRequest, dict]] = None, + *, + backup_vault: Optional[backupvault.BackupVault] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the settings of a BackupVault. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_update_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.UpdateBackupVaultRequest( + ) + + # Make the request + operation = client.update_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.UpdateBackupVaultRequest, dict]): + The request object. Request message for updating a + BackupVault. + backup_vault (google.cloud.backupdr_v1.types.BackupVault): + Required. The resource being updated + This corresponds to the ``backup_vault`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the BackupVault resource by the update. 
+ The fields specified in the update_mask are relative to + the resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then the request will fail. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.backupdr_v1.types.BackupVault` + Message describing a BackupVault object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([backup_vault, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.UpdateBackupVaultRequest): + request = backupvault.UpdateBackupVaultRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if backup_vault is not None: + request.backup_vault = backup_vault + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.update_backup_vault] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("backup_vault.name", request.backup_vault.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + backupvault.BackupVault, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_backup_vault( + self, + request: Optional[Union[backupvault.DeleteBackupVaultRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a BackupVault. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_delete_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupVaultRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.DeleteBackupVaultRequest, dict]): + The request object. Message for deleting a BackupVault. + name (str): + Required. Name of the resource. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.DeleteBackupVaultRequest): + request = backupvault.DeleteBackupVaultRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_backup_vault] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_data_sources( + self, + request: Optional[Union[backupvault.ListDataSourcesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDataSourcesPager: + r"""Lists DataSources in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_list_data_sources(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.ListDataSourcesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_sources(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.ListDataSourcesRequest, dict]): + The request object. Request message for listing + DataSources. + parent (str): + Required. The project and location for which to retrieve + data sources information, in the format + 'projects/{project_id}/locations/{location}'. In Cloud + Backup and DR, locations map to Google Cloud regions, + for example **us-central1**. To retrieve data sources + for all locations, use "-" for the '{location}' value. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.ListDataSourcesPager: + Response message for listing + DataSources. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.ListDataSourcesRequest): + request = backupvault.ListDataSourcesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_data_sources] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDataSourcesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_data_source( + self, + request: Optional[Union[backupvault.GetDataSourceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.DataSource: + r"""Gets details of a DataSource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_get_data_source(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.GetDataSourceRequest( + name="name_value", + ) + + # Make the request + response = client.get_data_source(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.GetDataSourceRequest, dict]): + The request object. Request message for getting a + DataSource instance. + name (str): + Required. Name of the data source resource name, in the + format + 'projects/{project_id}/locations/{location}/backupVaults/{resource_name}/dataSource/{resource_name}' + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.types.DataSource: + Message describing a DataSource + object. Datasource object used to + represent Datasource details for both + admin and basic view. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.GetDataSourceRequest): + request = backupvault.GetDataSourceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_data_source] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_data_source( + self, + request: Optional[Union[backupvault.UpdateDataSourceRequest, dict]] = None, + *, + data_source: Optional[backupvault.DataSource] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the settings of a DataSource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_update_data_source(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.UpdateDataSourceRequest( + ) + + # Make the request + operation = client.update_data_source(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.UpdateDataSourceRequest, dict]): + The request object. Request message for updating a data + source instance. + data_source (google.cloud.backupdr_v1.types.DataSource): + Required. The resource being updated + This corresponds to the ``data_source`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the DataSource resource by the update. + The fields specified in the update_mask are relative to + the resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then the request will fail. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.cloud.backupdr_v1.types.DataSource` Message describing a DataSource object. + Datasource object used to represent Datasource + details for both admin and basic view. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([data_source, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.UpdateDataSourceRequest): + request = backupvault.UpdateDataSourceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if data_source is not None: + request.data_source = data_source + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_data_source] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("data_source.name", request.data_source.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + backupvault.DataSource, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def list_backups( + self, + request: Optional[Union[backupvault.ListBackupsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupsPager: + r"""Lists Backups in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_list_backups(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backups(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.ListBackupsRequest, dict]): + The request object. Request message for listing Backups. + parent (str): + Required. The project and location for which to retrieve + backup information, in the format + 'projects/{project_id}/locations/{location}'. In Cloud + Backup and DR, locations map to Google Cloud regions, + for example **us-central1**. To retrieve data sources + for all locations, use "-" for the '{location}' value. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupsPager: + Response message for listing Backups. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.ListBackupsRequest): + request = backupvault.ListBackupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_backups] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListBackupsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_backup( + self, + request: Optional[Union[backupvault.GetBackupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.Backup: + r"""Gets details of a Backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_get_backup(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.GetBackupRequest, dict]): + The request object. Request message for getting a Backup. + name (str): + Required. Name of the data source resource name, in the + format + 'projects/{project_id}/locations/{location}/backupVaults/{backupVault}/dataSources/{datasource}/backups/{backup}' + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.types.Backup: + Message describing a Backup object. 
+ """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.GetBackupRequest): + request = backupvault.GetBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_backup( + self, + request: Optional[Union[backupvault.UpdateBackupRequest, dict]] = None, + *, + backup: Optional[backupvault.Backup] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the settings of a Backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_update_backup(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.UpdateBackupRequest( + ) + + # Make the request + operation = client.update_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.UpdateBackupRequest, dict]): + The request object. Request message for updating a + Backup. + backup (google.cloud.backupdr_v1.types.Backup): + Required. The resource being updated + This corresponds to the ``backup`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the Backup resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then the request will fail. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.backupdr_v1.types.Backup` Message + describing a Backup object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([backup, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.UpdateBackupRequest): + request = backupvault.UpdateBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if backup is not None: + request.backup = backup + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("backup.name", request.backup.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + backupvault.Backup, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def delete_backup( + self, + request: Optional[Union[backupvault.DeleteBackupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a Backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_delete_backup(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.DeleteBackupRequest, dict]): + The request object. Message for deleting a Backup. + name (str): + Required. Name of the resource. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.backupdr_v1.types.Backup` Message + describing a Backup object. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.DeleteBackupRequest): + request = backupvault.DeleteBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + backupvault.Backup, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def restore_backup( + self, + request: Optional[Union[backupvault.RestoreBackupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Restore from a Backup + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_restore_backup(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + compute_instance_target_environment = backupdr_v1.ComputeInstanceTargetEnvironment() + compute_instance_target_environment.project = "project_value" + compute_instance_target_environment.zone = "zone_value" + + request = backupdr_v1.RestoreBackupRequest( + compute_instance_target_environment=compute_instance_target_environment, + name="name_value", + ) + + # Make the request + operation = client.restore_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.RestoreBackupRequest, dict]): + The request object. Request message for restoring from a + Backup. + name (str): + Required. The resource name of the Backup instance, in + the format + 'projects/*/locations/*/backupVaults/*/dataSources/*/backups/'. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.backupdr_v1.types.RestoreBackupResponse` + Response message for restoring from a Backup. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.RestoreBackupRequest): + request = backupvault.RestoreBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.restore_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + backupvault.RestoreBackupResponse, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + def create_backup_plan( + self, + request: Optional[Union[backupplan.CreateBackupPlanRequest, dict]] = None, + *, + parent: Optional[str] = None, + backup_plan: Optional[backupplan.BackupPlan] = None, + backup_plan_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Create a BackupPlan + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_create_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + backup_plan = backupdr_v1.BackupPlan() + backup_plan.backup_rules.standard_schedule.recurrence_type = "YEARLY" + backup_plan.backup_rules.standard_schedule.backup_window.start_hour_of_day = 1820 + backup_plan.backup_rules.standard_schedule.backup_window.end_hour_of_day = 1573 + backup_plan.backup_rules.standard_schedule.time_zone = "time_zone_value" + backup_plan.backup_rules.rule_id = "rule_id_value" + backup_plan.backup_rules.backup_retention_days = 2237 + backup_plan.resource_type = "resource_type_value" + backup_plan.backup_vault = "backup_vault_value" + + request = backupdr_v1.CreateBackupPlanRequest( + parent="parent_value", + backup_plan_id="backup_plan_id_value", + backup_plan=backup_plan, + ) + + # Make the 
request + operation = client.create_backup_plan(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.CreateBackupPlanRequest, dict]): + The request object. The request message for creating a ``BackupPlan``. + parent (str): + Required. The ``BackupPlan`` project and location in the + format ``projects/{project}/locations/{location}``. In + Cloud BackupDR locations map to GCP regions, for example + **us-central1**. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_plan (google.cloud.backupdr_v1.types.BackupPlan): + Required. The ``BackupPlan`` resource object to create. + This corresponds to the ``backup_plan`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_plan_id (str): + Required. The name of the ``BackupPlan`` to create. The + name must be unique for the specified project and + location.The name must start with a lowercase letter + followed by up to 62 lowercase letters, numbers, or + hyphens. Pattern, /[a-z][a-z0-9-]{,62}/. + + This corresponds to the ``backup_plan_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.backupdr_v1.types.BackupPlan` A BackupPlan specifies some common fields, such as description as well + as one or more BackupRule messages. 
Each BackupRule + has a retention policy and defines a schedule by + which the system is to perform backup workloads. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, backup_plan, backup_plan_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupplan.CreateBackupPlanRequest): + request = backupplan.CreateBackupPlanRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup_plan is not None: + request.backup_plan = backup_plan + if backup_plan_id is not None: + request.backup_plan_id = backup_plan_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_backup_plan] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + backupplan.BackupPlan, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def get_backup_plan( + self, + request: Optional[Union[backupplan.GetBackupPlanRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupplan.BackupPlan: + r"""Gets details of a single BackupPlan. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_get_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupPlanRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup_plan(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.GetBackupPlanRequest, dict]): + The request object. The request message for getting a ``BackupPlan``. + name (str): + Required. The resource name of the ``BackupPlan`` to + retrieve. + + Format: + ``projects/{project}/locations/{location}/backupPlans/{backup_plan}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.backupdr_v1.types.BackupPlan: + A BackupPlan specifies some common fields, such as description as well + as one or more BackupRule messages. Each BackupRule + has a retention policy and defines a schedule by + which the system is to perform backup workloads. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupplan.GetBackupPlanRequest): + request = backupplan.GetBackupPlanRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_backup_plan] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_backup_plans( + self, + request: Optional[Union[backupplan.ListBackupPlansRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupPlansPager: + r"""Lists BackupPlans in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_list_backup_plans(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupPlansRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_plans(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.ListBackupPlansRequest, dict]): + The request object. The request message for getting a list ``BackupPlan``. + parent (str): + Required. The project and location for which to retrieve + ``BackupPlans`` information. Format: + ``projects/{project}/locations/{location}``. In Cloud + BackupDR, locations map to GCP regions, for e.g. + **us-central1**. To retrieve backup plans for all + locations, use "-" for the ``{location}`` value. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlansPager: + The response message for getting a list of BackupPlan. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupplan.ListBackupPlansRequest): + request = backupplan.ListBackupPlansRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_backup_plans] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListBackupPlansPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_backup_plan( + self, + request: Optional[Union[backupplan.DeleteBackupPlanRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single BackupPlan. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_delete_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupPlanRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_plan(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.DeleteBackupPlanRequest, dict]): + The request object. The request message for deleting a ``BackupPlan``. + name (str): + Required. The resource name of the ``BackupPlan`` to + delete. + + Format: + ``projects/{project}/locations/{location}/backupPlans/{backup_plan}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupplan.DeleteBackupPlanRequest): + request = backupplan.DeleteBackupPlanRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_backup_plan] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + def create_backup_plan_association( + self, + request: Optional[ + Union[backupplanassociation.CreateBackupPlanAssociationRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + backup_plan_association: Optional[ + backupplanassociation.BackupPlanAssociation + ] = None, + backup_plan_association_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Create a BackupPlanAssociation + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_create_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + backup_plan_association = backupdr_v1.BackupPlanAssociation() + backup_plan_association.resource = "resource_value" + backup_plan_association.backup_plan = "backup_plan_value" + + request = backupdr_v1.CreateBackupPlanAssociationRequest( + parent="parent_value", + backup_plan_association_id="backup_plan_association_id_value", + backup_plan_association=backup_plan_association, + ) + + # Make the request + operation = client.create_backup_plan_association(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.CreateBackupPlanAssociationRequest, dict]): + The request object. Request message for creating a backup + plan. + parent (str): + Required. The backup plan association project and + location in the format + ``projects/{project_id}/locations/{location}``. In Cloud + BackupDR locations map to GCP regions, for example + **us-central1**. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_plan_association (google.cloud.backupdr_v1.types.BackupPlanAssociation): + Required. The resource being created + This corresponds to the ``backup_plan_association`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_plan_association_id (str): + Required. The name of the backup plan + association to create. The name must be + unique for the specified project and + location. 
+ + This corresponds to the ``backup_plan_association_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.backupdr_v1.types.BackupPlanAssociation` A BackupPlanAssociation represents a single BackupPlanAssociation which + contains details like workload, backup plan etc + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [parent, backup_plan_association, backup_plan_association_id] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, backupplanassociation.CreateBackupPlanAssociationRequest + ): + request = backupplanassociation.CreateBackupPlanAssociationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup_plan_association is not None: + request.backup_plan_association = backup_plan_association + if backup_plan_association_id is not None: + request.backup_plan_association_id = backup_plan_association_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[ + self._transport.create_backup_plan_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + backupplanassociation.BackupPlanAssociation, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_backup_plan_association( + self, + request: Optional[ + Union[backupplanassociation.GetBackupPlanAssociationRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupplanassociation.BackupPlanAssociation: + r"""Gets details of a single BackupPlanAssociation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_get_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupPlanAssociationRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup_plan_association(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.GetBackupPlanAssociationRequest, dict]): + The request object. Request message for getting a + BackupPlanAssociation resource. + name (str): + Required. Name of the backup plan association resource, + in the format + ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.types.BackupPlanAssociation: + A BackupPlanAssociation represents a + single BackupPlanAssociation which + contains details like workload, backup + plan etc + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, backupplanassociation.GetBackupPlanAssociationRequest + ): + request = backupplanassociation.GetBackupPlanAssociationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.get_backup_plan_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_backup_plan_associations( + self, + request: Optional[ + Union[backupplanassociation.ListBackupPlanAssociationsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBackupPlanAssociationsPager: + r"""Lists BackupPlanAssociations in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_list_backup_plan_associations(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupPlanAssociationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_plan_associations(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.ListBackupPlanAssociationsRequest, dict]): + The request object. Request message for List + BackupPlanAssociation + parent (str): + Required. The project and location for which to retrieve + backup Plan Associations information, in the format + ``projects/{project_id}/locations/{location}``. In Cloud + BackupDR, locations map to GCP regions, for example + **us-central1**. To retrieve backup plan associations + for all locations, use "-" for the ``{location}`` value. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlanAssociationsPager: + Response message for List + BackupPlanAssociation + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, backupplanassociation.ListBackupPlanAssociationsRequest + ): + request = backupplanassociation.ListBackupPlanAssociationsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_backup_plan_associations + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListBackupPlanAssociationsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_backup_plan_association( + self, + request: Optional[ + Union[backupplanassociation.DeleteBackupPlanAssociationRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single BackupPlanAssociation. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_delete_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupPlanAssociationRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_plan_association(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.DeleteBackupPlanAssociationRequest, dict]): + The request object. Request message for deleting a backup + plan association. + name (str): + Required. Name of the backup plan association resource, + in the format + ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. 
A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, backupplanassociation.DeleteBackupPlanAssociationRequest + ): + request = backupplanassociation.DeleteBackupPlanAssociationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_backup_plan_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def trigger_backup( + self, + request: Optional[ + Union[backupplanassociation.TriggerBackupRequest, dict] + ] = None, + *, + name: Optional[str] = None, + rule_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Triggers a new Backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_trigger_backup(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.TriggerBackupRequest( + name="name_value", + rule_id="rule_id_value", + ) + + # Make the request + operation = client.trigger_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.TriggerBackupRequest, dict]): + The request object. Request message for triggering a + backup. + name (str): + Required. Name of the backup plan association resource, + in the format + ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + rule_id (str): + Required. backup rule_id for which a backup needs to be + triggered. + + This corresponds to the ``rule_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.backupdr_v1.types.BackupPlanAssociation` A BackupPlanAssociation represents a single BackupPlanAssociation which + contains details like workload, backup plan etc + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, rule_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupplanassociation.TriggerBackupRequest): + request = backupplanassociation.TriggerBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if rule_id is not None: + request.rule_id = rule_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.trigger_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + backupplanassociation.BackupPlanAssociation, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + def __enter__(self) -> "BackupDRClient": return self diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/pagers.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/pagers.py index 462c609c80a8..3594ed629a56 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/pagers.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/pagers.py @@ -38,7 +38,12 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.cloud.backupdr_v1.types import backupdr +from google.cloud.backupdr_v1.types import ( + backupdr, + backupplan, + backupplanassociation, + backupvault, +) class ListManagementServersPager: @@ -191,3 +196,921 @@ async def async_generator(): def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupVaultsPager: + """A pager for iterating through ``list_backup_vaults`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.ListBackupVaultsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``backup_vaults`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListBackupVaults`` requests and continue to iterate + through the ``backup_vaults`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.ListBackupVaultsResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., backupvault.ListBackupVaultsResponse], + request: backupvault.ListBackupVaultsRequest, + response: backupvault.ListBackupVaultsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.ListBackupVaultsRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.ListBackupVaultsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = backupvault.ListBackupVaultsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[backupvault.ListBackupVaultsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[backupvault.BackupVault]: + for page in self.pages: + yield from page.backup_vaults + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupVaultsAsyncPager: + """A pager for iterating through ``list_backup_vaults`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.ListBackupVaultsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``backup_vaults`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListBackupVaults`` requests and continue to iterate + through the ``backup_vaults`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.ListBackupVaultsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., Awaitable[backupvault.ListBackupVaultsResponse]], + request: backupvault.ListBackupVaultsRequest, + response: backupvault.ListBackupVaultsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.ListBackupVaultsRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.ListBackupVaultsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = backupvault.ListBackupVaultsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[backupvault.ListBackupVaultsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[backupvault.BackupVault]: + async def async_generator(): + async for page in self.pages: + for response in page.backup_vaults: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class FetchUsableBackupVaultsPager: + """A pager for iterating 
through ``fetch_usable_backup_vaults`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.FetchUsableBackupVaultsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``backup_vaults`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``FetchUsableBackupVaults`` requests and continue to iterate + through the ``backup_vaults`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.FetchUsableBackupVaultsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., backupvault.FetchUsableBackupVaultsResponse], + request: backupvault.FetchUsableBackupVaultsRequest, + response: backupvault.FetchUsableBackupVaultsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.FetchUsableBackupVaultsRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.FetchUsableBackupVaultsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = backupvault.FetchUsableBackupVaultsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[backupvault.FetchUsableBackupVaultsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[backupvault.BackupVault]: + for page in self.pages: + yield from page.backup_vaults + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class FetchUsableBackupVaultsAsyncPager: + """A pager for iterating through ``fetch_usable_backup_vaults`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.FetchUsableBackupVaultsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``backup_vaults`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``FetchUsableBackupVaults`` requests and continue to iterate + through the ``backup_vaults`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.FetchUsableBackupVaultsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., Awaitable[backupvault.FetchUsableBackupVaultsResponse]], + request: backupvault.FetchUsableBackupVaultsRequest, + response: backupvault.FetchUsableBackupVaultsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.FetchUsableBackupVaultsRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.FetchUsableBackupVaultsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = backupvault.FetchUsableBackupVaultsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[backupvault.FetchUsableBackupVaultsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[backupvault.BackupVault]: + async def async_generator(): + async for page in self.pages: + for response in page.backup_vaults: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class 
ListDataSourcesPager: + """A pager for iterating through ``list_data_sources`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.ListDataSourcesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``data_sources`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDataSources`` requests and continue to iterate + through the ``data_sources`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.ListDataSourcesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., backupvault.ListDataSourcesResponse], + request: backupvault.ListDataSourcesRequest, + response: backupvault.ListDataSourcesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.ListDataSourcesRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.ListDataSourcesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = backupvault.ListDataSourcesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[backupvault.ListDataSourcesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[backupvault.DataSource]: + for page in self.pages: + yield from page.data_sources + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDataSourcesAsyncPager: + """A pager for iterating through ``list_data_sources`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.ListDataSourcesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``data_sources`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDataSources`` requests and continue to iterate + through the ``data_sources`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.ListDataSourcesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[backupvault.ListDataSourcesResponse]], + request: backupvault.ListDataSourcesRequest, + response: backupvault.ListDataSourcesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.ListDataSourcesRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.ListDataSourcesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = backupvault.ListDataSourcesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[backupvault.ListDataSourcesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[backupvault.DataSource]: + async def async_generator(): + async for page in self.pages: + for response in page.data_sources: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupsPager: + """A pager for iterating through ``list_backups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.ListBackupsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``backups`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListBackups`` requests and continue to iterate + through the ``backups`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.backupdr_v1.types.ListBackupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., backupvault.ListBackupsResponse], + request: backupvault.ListBackupsRequest, + response: backupvault.ListBackupsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.ListBackupsRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.ListBackupsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = backupvault.ListBackupsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[backupvault.ListBackupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[backupvault.Backup]: + for page in self.pages: + yield from page.backups + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupsAsyncPager: + """A pager for iterating through ``list_backups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.ListBackupsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``backups`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListBackups`` requests and continue to iterate + through the ``backups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.ListBackupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[backupvault.ListBackupsResponse]], + request: backupvault.ListBackupsRequest, + response: backupvault.ListBackupsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.ListBackupsRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.ListBackupsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = backupvault.ListBackupsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[backupvault.ListBackupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[backupvault.Backup]: + async def async_generator(): + async for page in self.pages: + for response in page.backups: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupPlansPager: + """A pager for iterating through ``list_backup_plans`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.ListBackupPlansResponse` object, and + provides an ``__iter__`` method to iterate through its + ``backup_plans`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListBackupPlans`` requests and continue to iterate + through the ``backup_plans`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.ListBackupPlansResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., backupplan.ListBackupPlansResponse], + request: backupplan.ListBackupPlansRequest, + response: backupplan.ListBackupPlansResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.ListBackupPlansRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.ListBackupPlansResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = backupplan.ListBackupPlansRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[backupplan.ListBackupPlansResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[backupplan.BackupPlan]: + for page in self.pages: + yield from page.backup_plans + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupPlansAsyncPager: + """A pager for iterating through ``list_backup_plans`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.ListBackupPlansResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``backup_plans`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListBackupPlans`` requests and continue to iterate + through the ``backup_plans`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.ListBackupPlansResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[backupplan.ListBackupPlansResponse]], + request: backupplan.ListBackupPlansRequest, + response: backupplan.ListBackupPlansResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.ListBackupPlansRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.ListBackupPlansResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = backupplan.ListBackupPlansRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[backupplan.ListBackupPlansResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[backupplan.BackupPlan]: + async def async_generator(): + async for page in self.pages: + for response in page.backup_plans: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupPlanAssociationsPager: + """A pager for iterating through ``list_backup_plan_associations`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.ListBackupPlanAssociationsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``backup_plan_associations`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListBackupPlanAssociations`` requests and continue to iterate + through the ``backup_plan_associations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.ListBackupPlanAssociationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., backupplanassociation.ListBackupPlanAssociationsResponse], + request: backupplanassociation.ListBackupPlanAssociationsRequest, + response: backupplanassociation.ListBackupPlanAssociationsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.ListBackupPlanAssociationsRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.ListBackupPlanAssociationsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = backupplanassociation.ListBackupPlanAssociationsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages( + self, + ) -> Iterator[backupplanassociation.ListBackupPlanAssociationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[backupplanassociation.BackupPlanAssociation]: + for page in self.pages: + yield from page.backup_plan_associations + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupPlanAssociationsAsyncPager: + """A pager for iterating through ``list_backup_plan_associations`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.ListBackupPlanAssociationsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``backup_plan_associations`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListBackupPlanAssociations`` requests and continue to iterate + through the ``backup_plan_associations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.ListBackupPlanAssociationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[backupplanassociation.ListBackupPlanAssociationsResponse] + ], + request: backupplanassociation.ListBackupPlanAssociationsRequest, + response: backupplanassociation.ListBackupPlanAssociationsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.ListBackupPlanAssociationsRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.ListBackupPlanAssociationsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = backupplanassociation.ListBackupPlanAssociationsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[backupplanassociation.ListBackupPlanAssociationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[backupplanassociation.BackupPlanAssociation]: + async def async_generator(): + async for page in self.pages: + for response in page.backup_plan_associations: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/base.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/base.py index 0b741d07dfc8..e11f0ea29379 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/base.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/base.py @@ -29,7 +29,12 @@ from google.oauth2 import service_account # type: ignore from google.cloud.backupdr_v1 import gapic_version as package_version -from google.cloud.backupdr_v1.types import backupdr +from google.cloud.backupdr_v1.types import ( + backupdr, + backupplan, + backupplanassociation, + backupvault, +) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -170,6 +175,202 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, 
client_info=client_info, ), + self.create_backup_vault: gapic_v1.method.wrap_method( + self.create_backup_vault, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_backup_vaults: gapic_v1.method.wrap_method( + self.list_backup_vaults, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.fetch_usable_backup_vaults: gapic_v1.method.wrap_method( + self.fetch_usable_backup_vaults, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_backup_vault: gapic_v1.method.wrap_method( + self.get_backup_vault, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_backup_vault: gapic_v1.method.wrap_method( + self.update_backup_vault, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_backup_vault: gapic_v1.method.wrap_method( + self.delete_backup_vault, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_data_sources: gapic_v1.method.wrap_method( + self.list_data_sources, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( 
+ core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_data_source: gapic_v1.method.wrap_method( + self.get_data_source, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_data_source: gapic_v1.method.wrap_method( + self.update_data_source, + default_timeout=60.0, + client_info=client_info, + ), + self.list_backups: gapic_v1.method.wrap_method( + self.list_backups, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_backup: gapic_v1.method.wrap_method( + self.get_backup, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_backup: gapic_v1.method.wrap_method( + self.update_backup, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_backup: gapic_v1.method.wrap_method( + self.delete_backup, + default_timeout=None, + client_info=client_info, + ), + self.restore_backup: gapic_v1.method.wrap_method( + self.restore_backup, + default_timeout=60.0, + client_info=client_info, + ), + self.create_backup_plan: gapic_v1.method.wrap_method( + self.create_backup_plan, + default_timeout=None, + client_info=client_info, + ), + self.get_backup_plan: gapic_v1.method.wrap_method( + self.get_backup_plan, + default_timeout=None, + client_info=client_info, + ), + self.list_backup_plans: gapic_v1.method.wrap_method( + self.list_backup_plans, + default_timeout=None, + client_info=client_info, + ), + 
self.delete_backup_plan: gapic_v1.method.wrap_method( + self.delete_backup_plan, + default_timeout=None, + client_info=client_info, + ), + self.create_backup_plan_association: gapic_v1.method.wrap_method( + self.create_backup_plan_association, + default_timeout=None, + client_info=client_info, + ), + self.get_backup_plan_association: gapic_v1.method.wrap_method( + self.get_backup_plan_association, + default_timeout=None, + client_info=client_info, + ), + self.list_backup_plan_associations: gapic_v1.method.wrap_method( + self.list_backup_plan_associations, + default_timeout=None, + client_info=client_info, + ), + self.delete_backup_plan_association: gapic_v1.method.wrap_method( + self.delete_backup_plan_association, + default_timeout=None, + client_info=client_info, + ), + self.trigger_backup: gapic_v1.method.wrap_method( + self.trigger_backup, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -225,6 +426,233 @@ def delete_management_server( ]: raise NotImplementedError() + @property + def create_backup_vault( + self, + ) -> Callable[ + [backupvault.CreateBackupVaultRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_backup_vaults( + self, + ) -> Callable[ + [backupvault.ListBackupVaultsRequest], + Union[ + backupvault.ListBackupVaultsResponse, + Awaitable[backupvault.ListBackupVaultsResponse], + ], + ]: + raise NotImplementedError() + + @property + def fetch_usable_backup_vaults( + self, + ) -> Callable[ + [backupvault.FetchUsableBackupVaultsRequest], + Union[ + backupvault.FetchUsableBackupVaultsResponse, + Awaitable[backupvault.FetchUsableBackupVaultsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_backup_vault( + self, + ) -> Callable[ + [backupvault.GetBackupVaultRequest], + Union[backupvault.BackupVault, Awaitable[backupvault.BackupVault]], + ]: + raise NotImplementedError() + + @property + def update_backup_vault( + 
self, + ) -> Callable[ + [backupvault.UpdateBackupVaultRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_backup_vault( + self, + ) -> Callable[ + [backupvault.DeleteBackupVaultRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_data_sources( + self, + ) -> Callable[ + [backupvault.ListDataSourcesRequest], + Union[ + backupvault.ListDataSourcesResponse, + Awaitable[backupvault.ListDataSourcesResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_data_source( + self, + ) -> Callable[ + [backupvault.GetDataSourceRequest], + Union[backupvault.DataSource, Awaitable[backupvault.DataSource]], + ]: + raise NotImplementedError() + + @property + def update_data_source( + self, + ) -> Callable[ + [backupvault.UpdateDataSourceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_backups( + self, + ) -> Callable[ + [backupvault.ListBackupsRequest], + Union[ + backupvault.ListBackupsResponse, Awaitable[backupvault.ListBackupsResponse] + ], + ]: + raise NotImplementedError() + + @property + def get_backup( + self, + ) -> Callable[ + [backupvault.GetBackupRequest], + Union[backupvault.Backup, Awaitable[backupvault.Backup]], + ]: + raise NotImplementedError() + + @property + def update_backup( + self, + ) -> Callable[ + [backupvault.UpdateBackupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_backup( + self, + ) -> Callable[ + [backupvault.DeleteBackupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def restore_backup( + self, + ) -> Callable[ + [backupvault.RestoreBackupRequest], + Union[operations_pb2.Operation, 
Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def create_backup_plan( + self, + ) -> Callable[ + [backupplan.CreateBackupPlanRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def get_backup_plan( + self, + ) -> Callable[ + [backupplan.GetBackupPlanRequest], + Union[backupplan.BackupPlan, Awaitable[backupplan.BackupPlan]], + ]: + raise NotImplementedError() + + @property + def list_backup_plans( + self, + ) -> Callable[ + [backupplan.ListBackupPlansRequest], + Union[ + backupplan.ListBackupPlansResponse, + Awaitable[backupplan.ListBackupPlansResponse], + ], + ]: + raise NotImplementedError() + + @property + def delete_backup_plan( + self, + ) -> Callable[ + [backupplan.DeleteBackupPlanRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def create_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.CreateBackupPlanAssociationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def get_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.GetBackupPlanAssociationRequest], + Union[ + backupplanassociation.BackupPlanAssociation, + Awaitable[backupplanassociation.BackupPlanAssociation], + ], + ]: + raise NotImplementedError() + + @property + def list_backup_plan_associations( + self, + ) -> Callable[ + [backupplanassociation.ListBackupPlanAssociationsRequest], + Union[ + backupplanassociation.ListBackupPlanAssociationsResponse, + Awaitable[backupplanassociation.ListBackupPlanAssociationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def delete_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.DeleteBackupPlanAssociationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + 
]: + raise NotImplementedError() + + @property + def trigger_backup( + self, + ) -> Callable[ + [backupplanassociation.TriggerBackupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc.py index 39a429c884c0..63c287d5bfa1 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc.py @@ -26,7 +26,12 @@ from google.longrunning import operations_pb2 # type: ignore import grpc # type: ignore -from google.cloud.backupdr_v1.types import backupdr +from google.cloud.backupdr_v1.types import ( + backupdr, + backupplan, + backupplanassociation, + backupvault, +) from .base import DEFAULT_CLIENT_INFO, BackupDRTransport @@ -361,6 +366,637 @@ def delete_management_server( ) return self._stubs["delete_management_server"] + @property + def create_backup_vault( + self, + ) -> Callable[[backupvault.CreateBackupVaultRequest], operations_pb2.Operation]: + r"""Return a callable for the create backup vault method over gRPC. + + Creates a new BackupVault in a given project and + location. + + Returns: + Callable[[~.CreateBackupVaultRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_backup_vault" not in self._stubs: + self._stubs["create_backup_vault"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/CreateBackupVault", + request_serializer=backupvault.CreateBackupVaultRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_backup_vault"] + + @property + def list_backup_vaults( + self, + ) -> Callable[ + [backupvault.ListBackupVaultsRequest], backupvault.ListBackupVaultsResponse + ]: + r"""Return a callable for the list backup vaults method over gRPC. + + Lists BackupVaults in a given project and location. + + Returns: + Callable[[~.ListBackupVaultsRequest], + ~.ListBackupVaultsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_backup_vaults" not in self._stubs: + self._stubs["list_backup_vaults"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/ListBackupVaults", + request_serializer=backupvault.ListBackupVaultsRequest.serialize, + response_deserializer=backupvault.ListBackupVaultsResponse.deserialize, + ) + return self._stubs["list_backup_vaults"] + + @property + def fetch_usable_backup_vaults( + self, + ) -> Callable[ + [backupvault.FetchUsableBackupVaultsRequest], + backupvault.FetchUsableBackupVaultsResponse, + ]: + r"""Return a callable for the fetch usable backup vaults method over gRPC. + + FetchUsableBackupVaults lists usable BackupVaults in + a given project and location. Usable BackupVault are the + ones that user has backupdr.backupVaults.get permission. + + Returns: + Callable[[~.FetchUsableBackupVaultsRequest], + ~.FetchUsableBackupVaultsResponse]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "fetch_usable_backup_vaults" not in self._stubs: + self._stubs["fetch_usable_backup_vaults"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/FetchUsableBackupVaults", + request_serializer=backupvault.FetchUsableBackupVaultsRequest.serialize, + response_deserializer=backupvault.FetchUsableBackupVaultsResponse.deserialize, + ) + return self._stubs["fetch_usable_backup_vaults"] + + @property + def get_backup_vault( + self, + ) -> Callable[[backupvault.GetBackupVaultRequest], backupvault.BackupVault]: + r"""Return a callable for the get backup vault method over gRPC. + + Gets details of a BackupVault. + + Returns: + Callable[[~.GetBackupVaultRequest], + ~.BackupVault]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_backup_vault" not in self._stubs: + self._stubs["get_backup_vault"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/GetBackupVault", + request_serializer=backupvault.GetBackupVaultRequest.serialize, + response_deserializer=backupvault.BackupVault.deserialize, + ) + return self._stubs["get_backup_vault"] + + @property + def update_backup_vault( + self, + ) -> Callable[[backupvault.UpdateBackupVaultRequest], operations_pb2.Operation]: + r"""Return a callable for the update backup vault method over gRPC. + + Updates the settings of a BackupVault. + + Returns: + Callable[[~.UpdateBackupVaultRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_backup_vault" not in self._stubs: + self._stubs["update_backup_vault"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/UpdateBackupVault", + request_serializer=backupvault.UpdateBackupVaultRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_backup_vault"] + + @property + def delete_backup_vault( + self, + ) -> Callable[[backupvault.DeleteBackupVaultRequest], operations_pb2.Operation]: + r"""Return a callable for the delete backup vault method over gRPC. + + Deletes a BackupVault. + + Returns: + Callable[[~.DeleteBackupVaultRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_backup_vault" not in self._stubs: + self._stubs["delete_backup_vault"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/DeleteBackupVault", + request_serializer=backupvault.DeleteBackupVaultRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backup_vault"] + + @property + def list_data_sources( + self, + ) -> Callable[ + [backupvault.ListDataSourcesRequest], backupvault.ListDataSourcesResponse + ]: + r"""Return a callable for the list data sources method over gRPC. + + Lists DataSources in a given project and location. + + Returns: + Callable[[~.ListDataSourcesRequest], + ~.ListDataSourcesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_data_sources" not in self._stubs: + self._stubs["list_data_sources"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/ListDataSources", + request_serializer=backupvault.ListDataSourcesRequest.serialize, + response_deserializer=backupvault.ListDataSourcesResponse.deserialize, + ) + return self._stubs["list_data_sources"] + + @property + def get_data_source( + self, + ) -> Callable[[backupvault.GetDataSourceRequest], backupvault.DataSource]: + r"""Return a callable for the get data source method over gRPC. + + Gets details of a DataSource. + + Returns: + Callable[[~.GetDataSourceRequest], + ~.DataSource]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_data_source" not in self._stubs: + self._stubs["get_data_source"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/GetDataSource", + request_serializer=backupvault.GetDataSourceRequest.serialize, + response_deserializer=backupvault.DataSource.deserialize, + ) + return self._stubs["get_data_source"] + + @property + def update_data_source( + self, + ) -> Callable[[backupvault.UpdateDataSourceRequest], operations_pb2.Operation]: + r"""Return a callable for the update data source method over gRPC. + + Updates the settings of a DataSource. + + Returns: + Callable[[~.UpdateDataSourceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_data_source" not in self._stubs: + self._stubs["update_data_source"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/UpdateDataSource", + request_serializer=backupvault.UpdateDataSourceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_data_source"] + + @property + def list_backups( + self, + ) -> Callable[[backupvault.ListBackupsRequest], backupvault.ListBackupsResponse]: + r"""Return a callable for the list backups method over gRPC. + + Lists Backups in a given project and location. + + Returns: + Callable[[~.ListBackupsRequest], + ~.ListBackupsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_backups" not in self._stubs: + self._stubs["list_backups"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/ListBackups", + request_serializer=backupvault.ListBackupsRequest.serialize, + response_deserializer=backupvault.ListBackupsResponse.deserialize, + ) + return self._stubs["list_backups"] + + @property + def get_backup( + self, + ) -> Callable[[backupvault.GetBackupRequest], backupvault.Backup]: + r"""Return a callable for the get backup method over gRPC. + + Gets details of a Backup. + + Returns: + Callable[[~.GetBackupRequest], + ~.Backup]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_backup" not in self._stubs: + self._stubs["get_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/GetBackup", + request_serializer=backupvault.GetBackupRequest.serialize, + response_deserializer=backupvault.Backup.deserialize, + ) + return self._stubs["get_backup"] + + @property + def update_backup( + self, + ) -> Callable[[backupvault.UpdateBackupRequest], operations_pb2.Operation]: + r"""Return a callable for the update backup method over gRPC. + + Updates the settings of a Backup. + + Returns: + Callable[[~.UpdateBackupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_backup" not in self._stubs: + self._stubs["update_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/UpdateBackup", + request_serializer=backupvault.UpdateBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_backup"] + + @property + def delete_backup( + self, + ) -> Callable[[backupvault.DeleteBackupRequest], operations_pb2.Operation]: + r"""Return a callable for the delete backup method over gRPC. + + Deletes a Backup. + + Returns: + Callable[[~.DeleteBackupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_backup" not in self._stubs: + self._stubs["delete_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/DeleteBackup", + request_serializer=backupvault.DeleteBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backup"] + + @property + def restore_backup( + self, + ) -> Callable[[backupvault.RestoreBackupRequest], operations_pb2.Operation]: + r"""Return a callable for the restore backup method over gRPC. + + Restore from a Backup + + Returns: + Callable[[~.RestoreBackupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "restore_backup" not in self._stubs: + self._stubs["restore_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/RestoreBackup", + request_serializer=backupvault.RestoreBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["restore_backup"] + + @property + def create_backup_plan( + self, + ) -> Callable[[backupplan.CreateBackupPlanRequest], operations_pb2.Operation]: + r"""Return a callable for the create backup plan method over gRPC. + + Create a BackupPlan + + Returns: + Callable[[~.CreateBackupPlanRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_backup_plan" not in self._stubs: + self._stubs["create_backup_plan"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/CreateBackupPlan", + request_serializer=backupplan.CreateBackupPlanRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_backup_plan"] + + @property + def get_backup_plan( + self, + ) -> Callable[[backupplan.GetBackupPlanRequest], backupplan.BackupPlan]: + r"""Return a callable for the get backup plan method over gRPC. + + Gets details of a single BackupPlan. + + Returns: + Callable[[~.GetBackupPlanRequest], + ~.BackupPlan]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_backup_plan" not in self._stubs: + self._stubs["get_backup_plan"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/GetBackupPlan", + request_serializer=backupplan.GetBackupPlanRequest.serialize, + response_deserializer=backupplan.BackupPlan.deserialize, + ) + return self._stubs["get_backup_plan"] + + @property + def list_backup_plans( + self, + ) -> Callable[ + [backupplan.ListBackupPlansRequest], backupplan.ListBackupPlansResponse + ]: + r"""Return a callable for the list backup plans method over gRPC. + + Lists BackupPlans in a given project and location. + + Returns: + Callable[[~.ListBackupPlansRequest], + ~.ListBackupPlansResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_backup_plans" not in self._stubs: + self._stubs["list_backup_plans"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/ListBackupPlans", + request_serializer=backupplan.ListBackupPlansRequest.serialize, + response_deserializer=backupplan.ListBackupPlansResponse.deserialize, + ) + return self._stubs["list_backup_plans"] + + @property + def delete_backup_plan( + self, + ) -> Callable[[backupplan.DeleteBackupPlanRequest], operations_pb2.Operation]: + r"""Return a callable for the delete backup plan method over gRPC. + + Deletes a single BackupPlan. + + Returns: + Callable[[~.DeleteBackupPlanRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_backup_plan" not in self._stubs: + self._stubs["delete_backup_plan"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/DeleteBackupPlan", + request_serializer=backupplan.DeleteBackupPlanRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backup_plan"] + + @property + def create_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.CreateBackupPlanAssociationRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the create backup plan association method over gRPC. + + Create a BackupPlanAssociation + + Returns: + Callable[[~.CreateBackupPlanAssociationRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_backup_plan_association" not in self._stubs: + self._stubs[ + "create_backup_plan_association" + ] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/CreateBackupPlanAssociation", + request_serializer=backupplanassociation.CreateBackupPlanAssociationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_backup_plan_association"] + + @property + def get_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.GetBackupPlanAssociationRequest], + backupplanassociation.BackupPlanAssociation, + ]: + r"""Return a callable for the get backup plan association method over gRPC. + + Gets details of a single BackupPlanAssociation. + + Returns: + Callable[[~.GetBackupPlanAssociationRequest], + ~.BackupPlanAssociation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_backup_plan_association" not in self._stubs: + self._stubs["get_backup_plan_association"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/GetBackupPlanAssociation", + request_serializer=backupplanassociation.GetBackupPlanAssociationRequest.serialize, + response_deserializer=backupplanassociation.BackupPlanAssociation.deserialize, + ) + return self._stubs["get_backup_plan_association"] + + @property + def list_backup_plan_associations( + self, + ) -> Callable[ + [backupplanassociation.ListBackupPlanAssociationsRequest], + backupplanassociation.ListBackupPlanAssociationsResponse, + ]: + r"""Return a callable for the list backup plan associations method over gRPC. + + Lists BackupPlanAssociations in a given project and + location. 
+ + Returns: + Callable[[~.ListBackupPlanAssociationsRequest], + ~.ListBackupPlanAssociationsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_backup_plan_associations" not in self._stubs: + self._stubs[ + "list_backup_plan_associations" + ] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/ListBackupPlanAssociations", + request_serializer=backupplanassociation.ListBackupPlanAssociationsRequest.serialize, + response_deserializer=backupplanassociation.ListBackupPlanAssociationsResponse.deserialize, + ) + return self._stubs["list_backup_plan_associations"] + + @property + def delete_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.DeleteBackupPlanAssociationRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the delete backup plan association method over gRPC. + + Deletes a single BackupPlanAssociation. + + Returns: + Callable[[~.DeleteBackupPlanAssociationRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_backup_plan_association" not in self._stubs: + self._stubs[ + "delete_backup_plan_association" + ] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/DeleteBackupPlanAssociation", + request_serializer=backupplanassociation.DeleteBackupPlanAssociationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backup_plan_association"] + + @property + def trigger_backup( + self, + ) -> Callable[ + [backupplanassociation.TriggerBackupRequest], operations_pb2.Operation + ]: + r"""Return a callable for the trigger backup method over gRPC. + + Triggers a new Backup. + + Returns: + Callable[[~.TriggerBackupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "trigger_backup" not in self._stubs: + self._stubs["trigger_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/TriggerBackup", + request_serializer=backupplanassociation.TriggerBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["trigger_backup"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc_asyncio.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc_asyncio.py index 26b64ba6a60c..9acd2b61c3fb 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc_asyncio.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc_asyncio.py @@ -28,7 +28,12 @@ import grpc # type: ignore from grpc.experimental import aio # type: ignore -from google.cloud.backupdr_v1.types import 
backupdr +from google.cloud.backupdr_v1.types import ( + backupdr, + backupplan, + backupplanassociation, + backupvault, +) from .base import DEFAULT_CLIENT_INFO, BackupDRTransport from .grpc import BackupDRGrpcTransport @@ -374,6 +379,665 @@ def delete_management_server( ) return self._stubs["delete_management_server"] + @property + def create_backup_vault( + self, + ) -> Callable[ + [backupvault.CreateBackupVaultRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create backup vault method over gRPC. + + Creates a new BackupVault in a given project and + location. + + Returns: + Callable[[~.CreateBackupVaultRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_backup_vault" not in self._stubs: + self._stubs["create_backup_vault"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/CreateBackupVault", + request_serializer=backupvault.CreateBackupVaultRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_backup_vault"] + + @property + def list_backup_vaults( + self, + ) -> Callable[ + [backupvault.ListBackupVaultsRequest], + Awaitable[backupvault.ListBackupVaultsResponse], + ]: + r"""Return a callable for the list backup vaults method over gRPC. + + Lists BackupVaults in a given project and location. + + Returns: + Callable[[~.ListBackupVaultsRequest], + Awaitable[~.ListBackupVaultsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_backup_vaults" not in self._stubs: + self._stubs["list_backup_vaults"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/ListBackupVaults", + request_serializer=backupvault.ListBackupVaultsRequest.serialize, + response_deserializer=backupvault.ListBackupVaultsResponse.deserialize, + ) + return self._stubs["list_backup_vaults"] + + @property + def fetch_usable_backup_vaults( + self, + ) -> Callable[ + [backupvault.FetchUsableBackupVaultsRequest], + Awaitable[backupvault.FetchUsableBackupVaultsResponse], + ]: + r"""Return a callable for the fetch usable backup vaults method over gRPC. + + FetchUsableBackupVaults lists usable BackupVaults in + a given project and location. Usable BackupVault are the + ones that user has backupdr.backupVaults.get permission. + + Returns: + Callable[[~.FetchUsableBackupVaultsRequest], + Awaitable[~.FetchUsableBackupVaultsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "fetch_usable_backup_vaults" not in self._stubs: + self._stubs["fetch_usable_backup_vaults"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/FetchUsableBackupVaults", + request_serializer=backupvault.FetchUsableBackupVaultsRequest.serialize, + response_deserializer=backupvault.FetchUsableBackupVaultsResponse.deserialize, + ) + return self._stubs["fetch_usable_backup_vaults"] + + @property + def get_backup_vault( + self, + ) -> Callable[ + [backupvault.GetBackupVaultRequest], Awaitable[backupvault.BackupVault] + ]: + r"""Return a callable for the get backup vault method over gRPC. + + Gets details of a BackupVault. + + Returns: + Callable[[~.GetBackupVaultRequest], + Awaitable[~.BackupVault]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_backup_vault" not in self._stubs: + self._stubs["get_backup_vault"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/GetBackupVault", + request_serializer=backupvault.GetBackupVaultRequest.serialize, + response_deserializer=backupvault.BackupVault.deserialize, + ) + return self._stubs["get_backup_vault"] + + @property + def update_backup_vault( + self, + ) -> Callable[ + [backupvault.UpdateBackupVaultRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update backup vault method over gRPC. + + Updates the settings of a BackupVault. + + Returns: + Callable[[~.UpdateBackupVaultRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_backup_vault" not in self._stubs: + self._stubs["update_backup_vault"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/UpdateBackupVault", + request_serializer=backupvault.UpdateBackupVaultRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_backup_vault"] + + @property + def delete_backup_vault( + self, + ) -> Callable[ + [backupvault.DeleteBackupVaultRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete backup vault method over gRPC. + + Deletes a BackupVault. + + Returns: + Callable[[~.DeleteBackupVaultRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_backup_vault" not in self._stubs: + self._stubs["delete_backup_vault"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/DeleteBackupVault", + request_serializer=backupvault.DeleteBackupVaultRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backup_vault"] + + @property + def list_data_sources( + self, + ) -> Callable[ + [backupvault.ListDataSourcesRequest], + Awaitable[backupvault.ListDataSourcesResponse], + ]: + r"""Return a callable for the list data sources method over gRPC. + + Lists DataSources in a given project and location. + + Returns: + Callable[[~.ListDataSourcesRequest], + Awaitable[~.ListDataSourcesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_data_sources" not in self._stubs: + self._stubs["list_data_sources"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/ListDataSources", + request_serializer=backupvault.ListDataSourcesRequest.serialize, + response_deserializer=backupvault.ListDataSourcesResponse.deserialize, + ) + return self._stubs["list_data_sources"] + + @property + def get_data_source( + self, + ) -> Callable[ + [backupvault.GetDataSourceRequest], Awaitable[backupvault.DataSource] + ]: + r"""Return a callable for the get data source method over gRPC. + + Gets details of a DataSource. + + Returns: + Callable[[~.GetDataSourceRequest], + Awaitable[~.DataSource]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_data_source" not in self._stubs: + self._stubs["get_data_source"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/GetDataSource", + request_serializer=backupvault.GetDataSourceRequest.serialize, + response_deserializer=backupvault.DataSource.deserialize, + ) + return self._stubs["get_data_source"] + + @property + def update_data_source( + self, + ) -> Callable[ + [backupvault.UpdateDataSourceRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update data source method over gRPC. + + Updates the settings of a DataSource. + + Returns: + Callable[[~.UpdateDataSourceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_data_source" not in self._stubs: + self._stubs["update_data_source"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/UpdateDataSource", + request_serializer=backupvault.UpdateDataSourceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_data_source"] + + @property + def list_backups( + self, + ) -> Callable[ + [backupvault.ListBackupsRequest], Awaitable[backupvault.ListBackupsResponse] + ]: + r"""Return a callable for the list backups method over gRPC. + + Lists Backups in a given project and location. + + Returns: + Callable[[~.ListBackupsRequest], + Awaitable[~.ListBackupsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_backups" not in self._stubs: + self._stubs["list_backups"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/ListBackups", + request_serializer=backupvault.ListBackupsRequest.serialize, + response_deserializer=backupvault.ListBackupsResponse.deserialize, + ) + return self._stubs["list_backups"] + + @property + def get_backup( + self, + ) -> Callable[[backupvault.GetBackupRequest], Awaitable[backupvault.Backup]]: + r"""Return a callable for the get backup method over gRPC. + + Gets details of a Backup. + + Returns: + Callable[[~.GetBackupRequest], + Awaitable[~.Backup]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_backup" not in self._stubs: + self._stubs["get_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/GetBackup", + request_serializer=backupvault.GetBackupRequest.serialize, + response_deserializer=backupvault.Backup.deserialize, + ) + return self._stubs["get_backup"] + + @property + def update_backup( + self, + ) -> Callable[ + [backupvault.UpdateBackupRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update backup method over gRPC. + + Updates the settings of a Backup. + + Returns: + Callable[[~.UpdateBackupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_backup" not in self._stubs: + self._stubs["update_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/UpdateBackup", + request_serializer=backupvault.UpdateBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_backup"] + + @property + def delete_backup( + self, + ) -> Callable[ + [backupvault.DeleteBackupRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete backup method over gRPC. + + Deletes a Backup. + + Returns: + Callable[[~.DeleteBackupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_backup" not in self._stubs: + self._stubs["delete_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/DeleteBackup", + request_serializer=backupvault.DeleteBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backup"] + + @property + def restore_backup( + self, + ) -> Callable[ + [backupvault.RestoreBackupRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the restore backup method over gRPC. + + Restore from a Backup + + Returns: + Callable[[~.RestoreBackupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "restore_backup" not in self._stubs: + self._stubs["restore_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/RestoreBackup", + request_serializer=backupvault.RestoreBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["restore_backup"] + + @property + def create_backup_plan( + self, + ) -> Callable[ + [backupplan.CreateBackupPlanRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create backup plan method over gRPC. + + Create a BackupPlan + + Returns: + Callable[[~.CreateBackupPlanRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_backup_plan" not in self._stubs: + self._stubs["create_backup_plan"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/CreateBackupPlan", + request_serializer=backupplan.CreateBackupPlanRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_backup_plan"] + + @property + def get_backup_plan( + self, + ) -> Callable[[backupplan.GetBackupPlanRequest], Awaitable[backupplan.BackupPlan]]: + r"""Return a callable for the get backup plan method over gRPC. + + Gets details of a single BackupPlan. + + Returns: + Callable[[~.GetBackupPlanRequest], + Awaitable[~.BackupPlan]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_backup_plan" not in self._stubs: + self._stubs["get_backup_plan"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/GetBackupPlan", + request_serializer=backupplan.GetBackupPlanRequest.serialize, + response_deserializer=backupplan.BackupPlan.deserialize, + ) + return self._stubs["get_backup_plan"] + + @property + def list_backup_plans( + self, + ) -> Callable[ + [backupplan.ListBackupPlansRequest], + Awaitable[backupplan.ListBackupPlansResponse], + ]: + r"""Return a callable for the list backup plans method over gRPC. + + Lists BackupPlans in a given project and location. + + Returns: + Callable[[~.ListBackupPlansRequest], + Awaitable[~.ListBackupPlansResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_backup_plans" not in self._stubs: + self._stubs["list_backup_plans"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/ListBackupPlans", + request_serializer=backupplan.ListBackupPlansRequest.serialize, + response_deserializer=backupplan.ListBackupPlansResponse.deserialize, + ) + return self._stubs["list_backup_plans"] + + @property + def delete_backup_plan( + self, + ) -> Callable[ + [backupplan.DeleteBackupPlanRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete backup plan method over gRPC. + + Deletes a single BackupPlan. + + Returns: + Callable[[~.DeleteBackupPlanRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_backup_plan" not in self._stubs: + self._stubs["delete_backup_plan"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/DeleteBackupPlan", + request_serializer=backupplan.DeleteBackupPlanRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backup_plan"] + + @property + def create_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.CreateBackupPlanAssociationRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the create backup plan association method over gRPC. + + Create a BackupPlanAssociation + + Returns: + Callable[[~.CreateBackupPlanAssociationRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_backup_plan_association" not in self._stubs: + self._stubs[ + "create_backup_plan_association" + ] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/CreateBackupPlanAssociation", + request_serializer=backupplanassociation.CreateBackupPlanAssociationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_backup_plan_association"] + + @property + def get_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.GetBackupPlanAssociationRequest], + Awaitable[backupplanassociation.BackupPlanAssociation], + ]: + r"""Return a callable for the get backup plan association method over gRPC. + + Gets details of a single BackupPlanAssociation. + + Returns: + Callable[[~.GetBackupPlanAssociationRequest], + Awaitable[~.BackupPlanAssociation]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_backup_plan_association" not in self._stubs: + self._stubs["get_backup_plan_association"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/GetBackupPlanAssociation", + request_serializer=backupplanassociation.GetBackupPlanAssociationRequest.serialize, + response_deserializer=backupplanassociation.BackupPlanAssociation.deserialize, + ) + return self._stubs["get_backup_plan_association"] + + @property + def list_backup_plan_associations( + self, + ) -> Callable[ + [backupplanassociation.ListBackupPlanAssociationsRequest], + Awaitable[backupplanassociation.ListBackupPlanAssociationsResponse], + ]: + r"""Return a callable for the list backup plan associations method over gRPC. + + Lists BackupPlanAssociations in a given project and + location. + + Returns: + Callable[[~.ListBackupPlanAssociationsRequest], + Awaitable[~.ListBackupPlanAssociationsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_backup_plan_associations" not in self._stubs: + self._stubs[ + "list_backup_plan_associations" + ] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/ListBackupPlanAssociations", + request_serializer=backupplanassociation.ListBackupPlanAssociationsRequest.serialize, + response_deserializer=backupplanassociation.ListBackupPlanAssociationsResponse.deserialize, + ) + return self._stubs["list_backup_plan_associations"] + + @property + def delete_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.DeleteBackupPlanAssociationRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the delete backup plan association method over gRPC. + + Deletes a single BackupPlanAssociation. + + Returns: + Callable[[~.DeleteBackupPlanAssociationRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_backup_plan_association" not in self._stubs: + self._stubs[ + "delete_backup_plan_association" + ] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/DeleteBackupPlanAssociation", + request_serializer=backupplanassociation.DeleteBackupPlanAssociationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_backup_plan_association"] + + @property + def trigger_backup( + self, + ) -> Callable[ + [backupplanassociation.TriggerBackupRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the trigger backup method over gRPC. + + Triggers a new Backup. + + Returns: + Callable[[~.TriggerBackupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "trigger_backup" not in self._stubs: + self._stubs["trigger_backup"] = self.grpc_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/TriggerBackup", + request_serializer=backupplanassociation.TriggerBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["trigger_backup"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -415,6 +1079,202 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.create_backup_vault: gapic_v1.method_async.wrap_method( + self.create_backup_vault, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_backup_vaults: gapic_v1.method_async.wrap_method( + self.list_backup_vaults, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.fetch_usable_backup_vaults: gapic_v1.method_async.wrap_method( + self.fetch_usable_backup_vaults, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_backup_vault: gapic_v1.method_async.wrap_method( + self.get_backup_vault, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + 
multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_backup_vault: gapic_v1.method_async.wrap_method( + self.update_backup_vault, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_backup_vault: gapic_v1.method_async.wrap_method( + self.delete_backup_vault, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_data_sources: gapic_v1.method_async.wrap_method( + self.list_data_sources, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_data_source: gapic_v1.method_async.wrap_method( + self.get_data_source, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_data_source: gapic_v1.method_async.wrap_method( + self.update_data_source, + default_timeout=60.0, + client_info=client_info, + ), + self.list_backups: gapic_v1.method_async.wrap_method( + self.list_backups, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_backup: gapic_v1.method_async.wrap_method( + self.get_backup, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + 
core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_backup: gapic_v1.method_async.wrap_method( + self.update_backup, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_backup: gapic_v1.method_async.wrap_method( + self.delete_backup, + default_timeout=None, + client_info=client_info, + ), + self.restore_backup: gapic_v1.method_async.wrap_method( + self.restore_backup, + default_timeout=60.0, + client_info=client_info, + ), + self.create_backup_plan: gapic_v1.method_async.wrap_method( + self.create_backup_plan, + default_timeout=None, + client_info=client_info, + ), + self.get_backup_plan: gapic_v1.method_async.wrap_method( + self.get_backup_plan, + default_timeout=None, + client_info=client_info, + ), + self.list_backup_plans: gapic_v1.method_async.wrap_method( + self.list_backup_plans, + default_timeout=None, + client_info=client_info, + ), + self.delete_backup_plan: gapic_v1.method_async.wrap_method( + self.delete_backup_plan, + default_timeout=None, + client_info=client_info, + ), + self.create_backup_plan_association: gapic_v1.method_async.wrap_method( + self.create_backup_plan_association, + default_timeout=None, + client_info=client_info, + ), + self.get_backup_plan_association: gapic_v1.method_async.wrap_method( + self.get_backup_plan_association, + default_timeout=None, + client_info=client_info, + ), + self.list_backup_plan_associations: gapic_v1.method_async.wrap_method( + self.list_backup_plan_associations, + default_timeout=None, + client_info=client_info, + ), + self.delete_backup_plan_association: gapic_v1.method_async.wrap_method( + self.delete_backup_plan_association, + default_timeout=None, + client_info=client_info, + ), + self.trigger_backup: gapic_v1.method_async.wrap_method( + self.trigger_backup, + default_timeout=None, + client_info=client_info, + ), } def close(self): diff --git 
a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/rest.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/rest.py index 621153ce0574..2ff0d1ccf3b9 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/rest.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/rest.py @@ -47,7 +47,12 @@ from google.longrunning import operations_pb2 # type: ignore -from google.cloud.backupdr_v1.types import backupdr +from google.cloud.backupdr_v1.types import ( + backupdr, + backupplan, + backupplanassociation, + backupvault, +) from .base import BackupDRTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO @@ -74,6 +79,30 @@ class BackupDRRestInterceptor: .. code-block:: python class MyCustomBackupDRInterceptor(BackupDRRestInterceptor): + def pre_create_backup_plan(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_backup_plan(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_backup_plan_association(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_backup_plan_association(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_backup_vault(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_backup_vault(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_management_server(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -82,6 +111,38 @@ def post_create_management_server(self, response): logging.log(f"Received response: {response}") return response + def pre_delete_backup(self, request, metadata): + 
logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_backup_plan(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_backup_plan(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_backup_plan_association(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_backup_plan_association(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_backup_vault(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_backup_vault(self, response): + logging.log(f"Received response: {response}") + return response + def pre_delete_management_server(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -90,6 +151,54 @@ def post_delete_management_server(self, response): logging.log(f"Received response: {response}") return response + def pre_fetch_usable_backup_vaults(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_fetch_usable_backup_vaults(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_backup_plan(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_backup_plan(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_backup_plan_association(self, request, metadata): + 
logging.log(f"Received request: {request}") + return request, metadata + + def post_get_backup_plan_association(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_backup_vault(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_backup_vault(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_data_source(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_data_source(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_management_server(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -98,6 +207,46 @@ def post_get_management_server(self, response): logging.log(f"Received response: {response}") return response + def pre_list_backup_plan_associations(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_backup_plan_associations(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_backup_plans(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_backup_plans(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_backups(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_backups(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_backup_vaults(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_backup_vaults(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_data_sources(self, request, metadata): + logging.log(f"Received 
request: {request}") + return request, metadata + + def post_list_data_sources(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_management_servers(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -106,28 +255,68 @@ def post_list_management_servers(self, response): logging.log(f"Received response: {response}") return response + def pre_restore_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_restore_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_trigger_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_trigger_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_backup_vault(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_backup_vault(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_data_source(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_data_source(self, response): + logging.log(f"Received response: {response}") + return response + transport = BackupDRRestTransport(interceptor=MyCustomBackupDRInterceptor()) client = BackupDRClient(transport=transport) """ - def pre_create_management_server( + def pre_create_backup_plan( self, - request: backupdr.CreateManagementServerRequest, + request: backupplan.CreateBackupPlanRequest, metadata: Sequence[Tuple[str, str]], - ) -> 
Tuple[backupdr.CreateManagementServerRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_management_server + ) -> Tuple[backupplan.CreateBackupPlanRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_backup_plan Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. """ return request, metadata - def post_create_management_server( + def post_create_backup_plan( self, response: operations_pb2.Operation ) -> operations_pb2.Operation: - """Post-rpc interceptor for create_management_server + """Post-rpc interceptor for create_backup_plan Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -135,22 +324,25 @@ def post_create_management_server( """ return response - def pre_delete_management_server( + def pre_create_backup_plan_association( self, - request: backupdr.DeleteManagementServerRequest, + request: backupplanassociation.CreateBackupPlanAssociationRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[backupdr.DeleteManagementServerRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_management_server + ) -> Tuple[ + backupplanassociation.CreateBackupPlanAssociationRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for create_backup_plan_association Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. 
""" return request, metadata - def post_delete_management_server( + def post_create_backup_plan_association( self, response: operations_pb2.Operation ) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_management_server + """Post-rpc interceptor for create_backup_plan_association Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -158,22 +350,22 @@ def post_delete_management_server( """ return response - def pre_get_management_server( + def pre_create_backup_vault( self, - request: backupdr.GetManagementServerRequest, + request: backupvault.CreateBackupVaultRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[backupdr.GetManagementServerRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_management_server + ) -> Tuple[backupvault.CreateBackupVaultRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_backup_vault Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. 
""" return request, metadata - def post_get_management_server( - self, response: backupdr.ManagementServer - ) -> backupdr.ManagementServer: - """Post-rpc interceptor for get_management_server + def post_create_backup_vault( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_backup_vault Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -181,22 +373,22 @@ def post_get_management_server( """ return response - def pre_list_management_servers( + def pre_create_management_server( self, - request: backupdr.ListManagementServersRequest, + request: backupdr.CreateManagementServerRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[backupdr.ListManagementServersRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_management_servers + ) -> Tuple[backupdr.CreateManagementServerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_management_server Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. 
""" return request, metadata - def post_list_management_servers( - self, response: backupdr.ListManagementServersResponse - ) -> backupdr.ListManagementServersResponse: - """Post-rpc interceptor for list_management_servers + def post_create_management_server( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_management_server Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -204,22 +396,22 @@ def post_list_management_servers( """ return response - def pre_get_location( + def pre_delete_backup( self, - request: locations_pb2.GetLocationRequest, + request: backupvault.DeleteBackupRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_location + ) -> Tuple[backupvault.DeleteBackupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_backup Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. 
""" return request, metadata - def post_get_location( - self, response: locations_pb2.Location - ) -> locations_pb2.Location: - """Post-rpc interceptor for get_location + def post_delete_backup( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_backup Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -227,22 +419,22 @@ def post_get_location( """ return response - def pre_list_locations( + def pre_delete_backup_plan( self, - request: locations_pb2.ListLocationsRequest, + request: backupplan.DeleteBackupPlanRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_locations + ) -> Tuple[backupplan.DeleteBackupPlanRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_backup_plan Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. 
""" return request, metadata - def post_list_locations( - self, response: locations_pb2.ListLocationsResponse - ) -> locations_pb2.ListLocationsResponse: - """Post-rpc interceptor for list_locations + def post_delete_backup_plan( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_backup_plan Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -250,20 +442,25 @@ def post_list_locations( """ return response - def pre_get_iam_policy( + def pre_delete_backup_plan_association( self, - request: iam_policy_pb2.GetIamPolicyRequest, + request: backupplanassociation.DeleteBackupPlanAssociationRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_iam_policy + ) -> Tuple[ + backupplanassociation.DeleteBackupPlanAssociationRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for delete_backup_plan_association Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. 
""" return request, metadata - def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: - """Post-rpc interceptor for get_iam_policy + def post_delete_backup_plan_association( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_backup_plan_association Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -271,20 +468,22 @@ def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: """ return response - def pre_set_iam_policy( + def pre_delete_backup_vault( self, - request: iam_policy_pb2.SetIamPolicyRequest, + request: backupvault.DeleteBackupVaultRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for set_iam_policy + ) -> Tuple[backupvault.DeleteBackupVaultRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_backup_vault Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. 
""" return request, metadata - def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: - """Post-rpc interceptor for set_iam_policy + def post_delete_backup_vault( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_backup_vault Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -292,22 +491,22 @@ def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: """ return response - def pre_test_iam_permissions( + def pre_delete_management_server( self, - request: iam_policy_pb2.TestIamPermissionsRequest, + request: backupdr.DeleteManagementServerRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for test_iam_permissions + ) -> Tuple[backupdr.DeleteManagementServerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_management_server Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. 
""" return request, metadata - def post_test_iam_permissions( - self, response: iam_policy_pb2.TestIamPermissionsResponse - ) -> iam_policy_pb2.TestIamPermissionsResponse: - """Post-rpc interceptor for test_iam_permissions + def post_delete_management_server( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_management_server Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -315,20 +514,22 @@ def post_test_iam_permissions( """ return response - def pre_cancel_operation( + def pre_fetch_usable_backup_vaults( self, - request: operations_pb2.CancelOperationRequest, + request: backupvault.FetchUsableBackupVaultsRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for cancel_operation + ) -> Tuple[backupvault.FetchUsableBackupVaultsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for fetch_usable_backup_vaults Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. 
""" return request, metadata - def post_cancel_operation(self, response: None) -> None: - """Post-rpc interceptor for cancel_operation + def post_fetch_usable_backup_vaults( + self, response: backupvault.FetchUsableBackupVaultsResponse + ) -> backupvault.FetchUsableBackupVaultsResponse: + """Post-rpc interceptor for fetch_usable_backup_vaults Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -336,20 +537,18 @@ def post_cancel_operation(self, response: None) -> None: """ return response - def pre_delete_operation( - self, - request: operations_pb2.DeleteOperationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_operation + def pre_get_backup( + self, request: backupvault.GetBackupRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[backupvault.GetBackupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_backup Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. 
""" return request, metadata - def post_delete_operation(self, response: None) -> None: - """Post-rpc interceptor for delete_operation + def post_get_backup(self, response: backupvault.Backup) -> backupvault.Backup: + """Post-rpc interceptor for get_backup Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -357,22 +556,22 @@ def post_delete_operation(self, response: None) -> None: """ return response - def pre_get_operation( + def pre_get_backup_plan( self, - request: operations_pb2.GetOperationRequest, + request: backupplan.GetBackupPlanRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_operation + ) -> Tuple[backupplan.GetBackupPlanRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_backup_plan Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. """ return request, metadata - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation + def post_get_backup_plan( + self, response: backupplan.BackupPlan + ) -> backupplan.BackupPlan: + """Post-rpc interceptor for get_backup_plan Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -380,22 +579,24 @@ def post_get_operation( """ return response - def pre_list_operations( + def pre_get_backup_plan_association( self, - request: operations_pb2.ListOperationsRequest, + request: backupplanassociation.GetBackupPlanAssociationRequest, metadata: Sequence[Tuple[str, str]], - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_operations + ) -> Tuple[ + backupplanassociation.GetBackupPlanAssociationRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_backup_plan_association Override 
in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. """ return request, metadata - def post_list_operations( - self, response: operations_pb2.ListOperationsResponse - ) -> operations_pb2.ListOperationsResponse: - """Post-rpc interceptor for list_operations + def post_get_backup_plan_association( + self, response: backupplanassociation.BackupPlanAssociation + ) -> backupplanassociation.BackupPlanAssociation: + """Post-rpc interceptor for get_backup_plan_association Override in a subclass to manipulate the response after it is returned by the BackupDR server but before @@ -403,164 +604,2784 @@ def post_list_operations( """ return response + def pre_get_backup_vault( + self, + request: backupvault.GetBackupVaultRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupvault.GetBackupVaultRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_backup_vault -@dataclasses.dataclass -class BackupDRRestStub: - _session: AuthorizedSession - _host: str - _interceptor: BackupDRRestInterceptor + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + def post_get_backup_vault( + self, response: backupvault.BackupVault + ) -> backupvault.BackupVault: + """Post-rpc interceptor for get_backup_vault -class BackupDRRestTransport(BackupDRTransport): - """REST backend transport for BackupDR. + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. 
+ """ + return response - The BackupDR Service + def pre_get_data_source( + self, + request: backupvault.GetDataSourceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupvault.GetDataSourceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_data_source - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata - It sends JSON representations of protocol buffers over HTTP/1.1 + def post_get_data_source( + self, response: backupvault.DataSource + ) -> backupvault.DataSource: + """Post-rpc interceptor for get_data_source - """ + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response - def __init__( + def pre_get_management_server( self, - *, - host: str = "backupdr.googleapis.com", - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = "https", - interceptor: Optional[BackupDRRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. + request: backupdr.GetManagementServerRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupdr.GetManagementServerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_management_server - Args: - host (Optional[str]): - The hostname to connect to (default: 'backupdr.googleapis.com'). 
- credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. + def post_get_management_server( + self, response: backupdr.ManagementServer + ) -> backupdr.ManagementServer: + """Post-rpc interceptor for get_management_server + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
- # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError( - f"Unexpected hostname structure: {host}" - ) # pragma: NO COVER + return response - url_match_items = maybe_url_match.groupdict() + def pre_list_backup_plan_associations( + self, + request: backupplanassociation.ListBackupPlanAssociationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + backupplanassociation.ListBackupPlanAssociationsRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for list_backup_plan_associations - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST - ) - self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or BackupDRRestInterceptor() - self._prep_wrapped_messages(client_info) + def post_list_backup_plan_associations( + self, response: backupplanassociation.ListBackupPlanAssociationsResponse + ) -> backupplanassociation.ListBackupPlanAssociationsResponse: + """Post-rpc interceptor for list_backup_plan_associations - @property - def operations_client(self) -> operations_v1.AbstractOperationsClient: - """Create the client designed to process long-running operations. + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. 
+ """ + return response - This property caches on the instance; repeated calls return the same - client. + def pre_list_backup_plans( + self, + request: backupplan.ListBackupPlansRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupplan.ListBackupPlansRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_backup_plans + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. """ - # Only create a new client if we do not already have one. - if self._operations_client is None: - http_options: Dict[str, List[Dict[str, str]]] = { - "google.longrunning.Operations.CancelOperation": [ - { - "method": "post", - "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", - "body": "*", - }, - ], - "google.longrunning.Operations.DeleteOperation": [ - { - "method": "delete", - "uri": "/v1/{name=projects/*/locations/*/operations/*}", - }, - ], - "google.longrunning.Operations.GetOperation": [ - { - "method": "get", - "uri": "/v1/{name=projects/*/locations/*/operations/*}", - }, - ], - "google.longrunning.Operations.ListOperations": [ - { - "method": "get", - "uri": "/v1/{name=projects/*/locations/*}/operations", - }, - ], - } + return request, metadata - rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1", - ) + def post_list_backup_plans( + self, response: backupplan.ListBackupPlansResponse + ) -> backupplan.ListBackupPlansResponse: + """Post-rpc interceptor for list_backup_plans - self._operations_client = operations_v1.AbstractOperationsClient( - transport=rest_transport - ) + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. 
+ """ + return response + + def pre_list_backups( + self, + request: backupvault.ListBackupsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupvault.ListBackupsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_backups + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_list_backups( + self, response: backupvault.ListBackupsResponse + ) -> backupvault.ListBackupsResponse: + """Post-rpc interceptor for list_backups + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_list_backup_vaults( + self, + request: backupvault.ListBackupVaultsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupvault.ListBackupVaultsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_backup_vaults + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_list_backup_vaults( + self, response: backupvault.ListBackupVaultsResponse + ) -> backupvault.ListBackupVaultsResponse: + """Post-rpc interceptor for list_backup_vaults + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_list_data_sources( + self, + request: backupvault.ListDataSourcesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupvault.ListDataSourcesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_data_sources + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. 
+ """ + return request, metadata + + def post_list_data_sources( + self, response: backupvault.ListDataSourcesResponse + ) -> backupvault.ListDataSourcesResponse: + """Post-rpc interceptor for list_data_sources + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_list_management_servers( + self, + request: backupdr.ListManagementServersRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupdr.ListManagementServersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_management_servers + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_list_management_servers( + self, response: backupdr.ListManagementServersResponse + ) -> backupdr.ListManagementServersResponse: + """Post-rpc interceptor for list_management_servers + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_restore_backup( + self, + request: backupvault.RestoreBackupRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupvault.RestoreBackupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for restore_backup + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_restore_backup( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for restore_backup + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. 
+ """ + return response + + def pre_trigger_backup( + self, + request: backupplanassociation.TriggerBackupRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupplanassociation.TriggerBackupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for trigger_backup + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_trigger_backup( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for trigger_backup + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_update_backup( + self, + request: backupvault.UpdateBackupRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupvault.UpdateBackupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_backup + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_update_backup( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_backup + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_update_backup_vault( + self, + request: backupvault.UpdateBackupVaultRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupvault.UpdateBackupVaultRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_backup_vault + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. 
+ """ + return request, metadata + + def post_update_backup_vault( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_backup_vault + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_update_data_source( + self, + request: backupvault.UpdateDataSourceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[backupvault.UpdateDataSourceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_data_source + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_update_data_source( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_data_source + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. 
+ """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. 
+ """ + return request, metadata + + def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_test_iam_permissions( + self, response: iam_policy_pb2.TestIamPermissionsResponse + ) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. 
+ """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. 
+ """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class BackupDRRestStub: + _session: AuthorizedSession + _host: str + _interceptor: BackupDRRestInterceptor + + +class BackupDRRestTransport(BackupDRTransport): + """REST backend transport for BackupDR. + + The BackupDR Service + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "backupdr.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[BackupDRRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'backupdr.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or BackupDRRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. 
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ], + "google.longrunning.Operations.DeleteOperation": [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _CreateBackupPlan(BackupDRRestStub): + def __hash__(self): + return hash("CreateBackupPlan") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "backupPlanId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupplan.CreateBackupPlanRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create backup plan method over HTTP. + + Args: + request (~.backupplan.CreateBackupPlanRequest): + The request object. The request message for creating a ``BackupPlan``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/backupPlans", + "body": "backup_plan", + }, + ] + request, metadata = self._interceptor.pre_create_backup_plan( + request, metadata + ) + pb_request = backupplan.CreateBackupPlanRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_backup_plan(resp) + return resp + + class _CreateBackupPlanAssociation(BackupDRRestStub): + def __hash__(self): + return hash("CreateBackupPlanAssociation") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "backupPlanAssociationId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupplanassociation.CreateBackupPlanAssociationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create backup plan + association method over HTTP. + + Args: + request (~.backupplanassociation.CreateBackupPlanAssociationRequest): + The request object. Request message for creating a backup + plan. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/backupPlanAssociations", + "body": "backup_plan_association", + }, + ] + request, metadata = self._interceptor.pre_create_backup_plan_association( + request, metadata + ) + pb_request = backupplanassociation.CreateBackupPlanAssociationRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_backup_plan_association(resp) + return resp + + class _CreateBackupVault(BackupDRRestStub): + def __hash__(self): + return hash("CreateBackupVault") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "backupVaultId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupvault.CreateBackupVaultRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create backup vault method over HTTP. + + Args: + request (~.backupvault.CreateBackupVaultRequest): + The request object. Message for creating a BackupVault. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/backupVaults", + "body": "backup_vault", + }, + ] + request, metadata = self._interceptor.pre_create_backup_vault( + request, metadata + ) + pb_request = backupvault.CreateBackupVaultRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_backup_vault(resp) + return resp + + class _CreateManagementServer(BackupDRRestStub): + def __hash__(self): + return hash("CreateManagementServer") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "managementServerId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupdr.CreateManagementServerRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create management server method over HTTP. + + Args: + request (~.backupdr.CreateManagementServerRequest): + The request object. Request message for creating a + management server instance. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/managementServers", + "body": "management_server", + }, + ] + request, metadata = self._interceptor.pre_create_management_server( + request, metadata + ) + pb_request = backupdr.CreateManagementServerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_management_server(resp) + return resp + + class _DeleteBackup(BackupDRRestStub): + def __hash__(self): + return hash("DeleteBackup") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupvault.DeleteBackupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete backup method over HTTP. + + Args: + request (~.backupvault.DeleteBackupRequest): + The request object. Message for deleting a Backup. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_backup(request, metadata) + pb_request = backupvault.DeleteBackupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_backup(resp) + return resp + + class _DeleteBackupPlan(BackupDRRestStub): + def __hash__(self): + return hash("DeleteBackupPlan") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupplan.DeleteBackupPlanRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete backup plan method over HTTP. + + Args: + request (~.backupplan.DeleteBackupPlanRequest): + The request object. The request message for deleting a ``BackupPlan``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/backupPlans/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_backup_plan( + request, metadata + ) + pb_request = backupplan.DeleteBackupPlanRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_backup_plan(resp) + return resp + + class _DeleteBackupPlanAssociation(BackupDRRestStub): + def __hash__(self): + return hash("DeleteBackupPlanAssociation") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupplanassociation.DeleteBackupPlanAssociationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete backup plan + association method over HTTP. + + Args: + request (~.backupplanassociation.DeleteBackupPlanAssociationRequest): + The request object. Request message for deleting a backup + plan association. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/backupPlanAssociations/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_backup_plan_association( + request, metadata + ) + pb_request = backupplanassociation.DeleteBackupPlanAssociationRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_backup_plan_association(resp) + return resp + + class _DeleteBackupVault(BackupDRRestStub): + def __hash__(self): + return hash("DeleteBackupVault") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupvault.DeleteBackupVaultRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete backup vault method over HTTP. + + Args: + request (~.backupvault.DeleteBackupVaultRequest): + The request object. Message for deleting a BackupVault. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/backupVaults/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_backup_vault( + request, metadata + ) + pb_request = backupvault.DeleteBackupVaultRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_backup_vault(resp) + return resp + + class _DeleteManagementServer(BackupDRRestStub): + def __hash__(self): + return hash("DeleteManagementServer") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupdr.DeleteManagementServerRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete management server method over HTTP. + + Args: + request (~.backupdr.DeleteManagementServerRequest): + The request object. Request message for deleting a + management server instance. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/managementServers/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_management_server( + request, metadata + ) + pb_request = backupdr.DeleteManagementServerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_management_server(resp) + return resp + + class _FetchUsableBackupVaults(BackupDRRestStub): + def __hash__(self): + return hash("FetchUsableBackupVaults") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupvault.FetchUsableBackupVaultsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.FetchUsableBackupVaultsResponse: + r"""Call the fetch usable backup + vaults method over HTTP. + + Args: + request (~.backupvault.FetchUsableBackupVaultsRequest): + The request object. Request message for fetching usable + BackupVaults. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backupvault.FetchUsableBackupVaultsResponse: + Response message for fetching usable + BackupVaults. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/backupVaults:fetchUsable", + }, + ] + request, metadata = self._interceptor.pre_fetch_usable_backup_vaults( + request, metadata + ) + pb_request = backupvault.FetchUsableBackupVaultsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupvault.FetchUsableBackupVaultsResponse() + pb_resp = backupvault.FetchUsableBackupVaultsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_fetch_usable_backup_vaults(resp) + return resp + + class _GetBackup(BackupDRRestStub): + def __hash__(self): + return hash("GetBackup") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupvault.GetBackupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.Backup: + r"""Call the get backup method over HTTP. + + Args: + request (~.backupvault.GetBackupRequest): + The request object. Request message for getting a Backup. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backupvault.Backup: + Message describing a Backup object. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}", + }, + ] + request, metadata = self._interceptor.pre_get_backup(request, metadata) + pb_request = backupvault.GetBackupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupvault.Backup() + pb_resp = backupvault.Backup.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_backup(resp) + return resp + + class _GetBackupPlan(BackupDRRestStub): + def __hash__(self): + return hash("GetBackupPlan") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupplan.GetBackupPlanRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupplan.BackupPlan: + r"""Call the get backup plan method over HTTP. + + Args: + request (~.backupplan.GetBackupPlanRequest): + The request object. The request message for getting a ``BackupPlan``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backupplan.BackupPlan: + A ``BackupPlan`` specifies some common fields, such as + ``description`` as well as one or more ``BackupRule`` + messages. Each ``BackupRule`` has a retention policy and + defines a schedule by which the system is to perform + backup workloads. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/backupPlans/*}", + }, + ] + request, metadata = self._interceptor.pre_get_backup_plan(request, metadata) + pb_request = backupplan.GetBackupPlanRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupplan.BackupPlan() + pb_resp = backupplan.BackupPlan.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_backup_plan(resp) + return resp + + class _GetBackupPlanAssociation(BackupDRRestStub): + def __hash__(self): + return hash("GetBackupPlanAssociation") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupplanassociation.GetBackupPlanAssociationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupplanassociation.BackupPlanAssociation: + r"""Call the get backup plan + association method over HTTP. + + Args: + request (~.backupplanassociation.GetBackupPlanAssociationRequest): + The request object. Request message for getting a + BackupPlanAssociation resource. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.backupplanassociation.BackupPlanAssociation: + A BackupPlanAssociation represents a + single BackupPlanAssociation which + contains details like workload, backup + plan etc + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/backupPlanAssociations/*}", + }, + ] + request, metadata = self._interceptor.pre_get_backup_plan_association( + request, metadata + ) + pb_request = backupplanassociation.GetBackupPlanAssociationRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupplanassociation.BackupPlanAssociation() + pb_resp = backupplanassociation.BackupPlanAssociation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_backup_plan_association(resp) + return resp + + class _GetBackupVault(BackupDRRestStub): + def __hash__(self): + return hash("GetBackupVault") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupvault.GetBackupVaultRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.BackupVault: + r"""Call the get backup vault method over HTTP. + + Args: + request (~.backupvault.GetBackupVaultRequest): + The request object. Request message for getting a + BackupVault. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backupvault.BackupVault: + Message describing a BackupVault + object. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/backupVaults/*}", + }, + ] + request, metadata = self._interceptor.pre_get_backup_vault( + request, metadata + ) + pb_request = backupvault.GetBackupVaultRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupvault.BackupVault() + pb_resp = backupvault.BackupVault.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_backup_vault(resp) + return resp + + class _GetDataSource(BackupDRRestStub): + def __hash__(self): + return hash("GetDataSource") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupvault.GetDataSourceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.DataSource: + r"""Call the get data source method over HTTP. + + Args: + request (~.backupvault.GetDataSourceRequest): + The request object. Request message for getting a + DataSource instance. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backupvault.DataSource: + Message describing a DataSource + object. Datasource object used to + represent Datasource details for both + admin and basic view. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*}", + }, + ] + request, metadata = self._interceptor.pre_get_data_source(request, metadata) + pb_request = backupvault.GetDataSourceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupvault.DataSource() + pb_resp = backupvault.DataSource.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_data_source(resp) + return resp + + class _GetManagementServer(BackupDRRestStub): + def __hash__(self): + return hash("GetManagementServer") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupdr.GetManagementServerRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupdr.ManagementServer: + r"""Call the get management server method over HTTP. + + Args: + request (~.backupdr.GetManagementServerRequest): + The request object. Request message for getting a + management server instance. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backupdr.ManagementServer: + ManagementServer describes a single + BackupDR ManagementServer instance. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/managementServers/*}", + }, + ] + request, metadata = self._interceptor.pre_get_management_server( + request, metadata + ) + pb_request = backupdr.GetManagementServerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupdr.ManagementServer() + pb_resp = backupdr.ManagementServer.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_management_server(resp) + return resp + + class _ListBackupPlanAssociations(BackupDRRestStub): + def __hash__(self): + return hash("ListBackupPlanAssociations") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupplanassociation.ListBackupPlanAssociationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupplanassociation.ListBackupPlanAssociationsResponse: + r"""Call the list backup plan + associations method over HTTP. + + Args: + request (~.backupplanassociation.ListBackupPlanAssociationsRequest): + The request object. Request message for List + BackupPlanAssociation + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.backupplanassociation.ListBackupPlanAssociationsResponse: + Response message for List + BackupPlanAssociation + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/backupPlanAssociations", + }, + ] + request, metadata = self._interceptor.pre_list_backup_plan_associations( + request, metadata + ) + pb_request = backupplanassociation.ListBackupPlanAssociationsRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupplanassociation.ListBackupPlanAssociationsResponse() + pb_resp = backupplanassociation.ListBackupPlanAssociationsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_backup_plan_associations(resp) + return resp + + class _ListBackupPlans(BackupDRRestStub): + def __hash__(self): + return hash("ListBackupPlans") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupplan.ListBackupPlansRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupplan.ListBackupPlansResponse: + r"""Call the list backup plans method over HTTP. + + Args: + request (~.backupplan.ListBackupPlansRequest): + The request object. The request message for getting a list ``BackupPlan``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backupplan.ListBackupPlansResponse: + The response message for getting a list of + ``BackupPlan``. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/backupPlans", + }, + ] + request, metadata = self._interceptor.pre_list_backup_plans( + request, metadata + ) + pb_request = backupplan.ListBackupPlansRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupplan.ListBackupPlansResponse() + pb_resp = backupplan.ListBackupPlansResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_backup_plans(resp) + return resp + + class _ListBackups(BackupDRRestStub): + def __hash__(self): + return hash("ListBackups") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupvault.ListBackupsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.ListBackupsResponse: + r"""Call the list backups method over HTTP. + + Args: + request (~.backupvault.ListBackupsRequest): + The request object. Request message for listing Backups. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backupvault.ListBackupsResponse: + Response message for listing Backups. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/backupVaults/*/dataSources/*}/backups", + }, + ] + request, metadata = self._interceptor.pre_list_backups(request, metadata) + pb_request = backupvault.ListBackupsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupvault.ListBackupsResponse() + pb_resp = backupvault.ListBackupsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_backups(resp) + return resp + + class _ListBackupVaults(BackupDRRestStub): + def __hash__(self): + return hash("ListBackupVaults") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupvault.ListBackupVaultsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.ListBackupVaultsResponse: + r"""Call the list backup vaults method over HTTP. + + Args: + request (~.backupvault.ListBackupVaultsRequest): + The request object. Request message for listing + backupvault stores. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backupvault.ListBackupVaultsResponse: + Response message for listing + BackupVaults. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/backupVaults", + }, + ] + request, metadata = self._interceptor.pre_list_backup_vaults( + request, metadata + ) + pb_request = backupvault.ListBackupVaultsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupvault.ListBackupVaultsResponse() + pb_resp = backupvault.ListBackupVaultsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_backup_vaults(resp) + return resp + + class _ListDataSources(BackupDRRestStub): + def __hash__(self): + return hash("ListDataSources") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupvault.ListDataSourcesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupvault.ListDataSourcesResponse: + r"""Call the list data sources method over HTTP. + + Args: + request (~.backupvault.ListDataSourcesRequest): + The request object. Request message for listing + DataSources. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backupvault.ListDataSourcesResponse: + Response message for listing + DataSources. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/backupVaults/*}/dataSources", + }, + ] + request, metadata = self._interceptor.pre_list_data_sources( + request, metadata + ) + pb_request = backupvault.ListDataSourcesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupvault.ListDataSourcesResponse() + pb_resp = backupvault.ListDataSourcesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_data_sources(resp) + return resp + + class _ListManagementServers(BackupDRRestStub): + def __hash__(self): + return hash("ListManagementServers") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupdr.ListManagementServersRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> backupdr.ListManagementServersResponse: + r"""Call the list management servers method over HTTP. + + Args: + request (~.backupdr.ListManagementServersRequest): + The request object. Request message for listing + management servers. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.backupdr.ListManagementServersResponse: + Response message for listing + management servers. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/managementServers", + }, + ] + request, metadata = self._interceptor.pre_list_management_servers( + request, metadata + ) + pb_request = backupdr.ListManagementServersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupdr.ListManagementServersResponse() + pb_resp = backupdr.ListManagementServersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_management_servers(resp) + return resp + + class _RestoreBackup(BackupDRRestStub): + def __hash__(self): + return hash("RestoreBackup") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: backupvault.RestoreBackupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the restore backup method over HTTP. + + Args: + request (~.backupvault.RestoreBackupRequest): + The request object. Request message for restoring from a + Backup. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}:restore", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_restore_backup(request, metadata) + pb_request = backupvault.RestoreBackupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) - # Return the client from cache. 
- return self._operations_client + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_restore_backup(resp) + return resp - class _CreateManagementServer(BackupDRRestStub): + class _TriggerBackup(BackupDRRestStub): def __hash__(self): - return hash("CreateManagementServer") + return hash("TriggerBackup") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "managementServerId": "", - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): @@ -572,18 +3393,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: backupdr.CreateManagementServerRequest, + request: backupplanassociation.TriggerBackupRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the create management server method over HTTP. + r"""Call the trigger backup method over HTTP. Args: - request (~.backupdr.CreateManagementServerRequest): - The request object. Request message for creating a - management server instance. + request (~.backupplanassociation.TriggerBackupRequest): + The request object. Request message for triggering a + backup. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -601,14 +3422,12 @@ def __call__( http_options: List[Dict[str, str]] = [ { "method": "post", - "uri": "/v1/{parent=projects/*/locations/*}/managementServers", - "body": "management_server", + "uri": "/v1/{name=projects/*/locations/*/backupPlanAssociations/*}:triggerBackup", + "body": "*", }, ] - request, metadata = self._interceptor.pre_create_management_server( - request, metadata - ) - pb_request = backupdr.CreateManagementServerRequest.pb(request) + request, metadata = self._interceptor.pre_trigger_backup(request, metadata) + pb_request = backupplanassociation.TriggerBackupRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) # Jsonify the request body @@ -649,14 +3468,16 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_management_server(resp) + resp = self._interceptor.post_trigger_backup(resp) return resp - class _DeleteManagementServer(BackupDRRestStub): + class _UpdateBackup(BackupDRRestStub): def __hash__(self): - return hash("DeleteManagementServer") + return hash("UpdateBackup") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } @classmethod def _get_unset_required_fields(cls, message_dict): @@ -668,18 +3489,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: backupdr.DeleteManagementServerRequest, + request: backupvault.UpdateBackupRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the delete management server method over HTTP. + r"""Call the update backup method over HTTP. Args: - request (~.backupdr.DeleteManagementServerRequest): - The request object. Request message for deleting a - management server instance. 
+ request (~.backupvault.UpdateBackupRequest): + The request object. Request message for updating a + Backup. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -696,16 +3517,20 @@ def __call__( http_options: List[Dict[str, str]] = [ { - "method": "delete", - "uri": "/v1/{name=projects/*/locations/*/managementServers/*}", + "method": "patch", + "uri": "/v1/{backup.name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}", + "body": "backup", }, ] - request, metadata = self._interceptor.pre_delete_management_server( - request, metadata - ) - pb_request = backupdr.DeleteManagementServerRequest.pb(request) + request, metadata = self._interceptor.pre_update_backup(request, metadata) + pb_request = backupvault.UpdateBackupRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -728,6 +3553,7 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -738,14 +3564,16 @@ def __call__( # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_delete_management_server(resp) + resp = self._interceptor.post_update_backup(resp) return resp - class _GetManagementServer(BackupDRRestStub): + class _UpdateBackupVault(BackupDRRestStub): def __hash__(self): - return hash("GetManagementServer") + return hash("UpdateBackupVault") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } @classmethod def 
_get_unset_required_fields(cls, message_dict): @@ -757,18 +3585,18 @@ def _get_unset_required_fields(cls, message_dict): def __call__( self, - request: backupdr.GetManagementServerRequest, + request: backupvault.UpdateBackupVaultRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> backupdr.ManagementServer: - r"""Call the get management server method over HTTP. + ) -> operations_pb2.Operation: + r"""Call the update backup vault method over HTTP. Args: - request (~.backupdr.GetManagementServerRequest): - The request object. Request message for getting a - management server instance. + request (~.backupvault.UpdateBackupVaultRequest): + The request object. Request message for updating a + BackupVault. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -776,24 +3604,31 @@ def __call__( sent along with the request as metadata. Returns: - ~.backupdr.ManagementServer: - ManagementServer describes a single - BackupDR ManagementServer instance. + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
""" http_options: List[Dict[str, str]] = [ { - "method": "get", - "uri": "/v1/{name=projects/*/locations/*/managementServers/*}", + "method": "patch", + "uri": "/v1/{backup_vault.name=projects/*/locations/*/backupVaults/*}", + "body": "backup_vault", }, ] - request, metadata = self._interceptor.pre_get_management_server( + request, metadata = self._interceptor.pre_update_backup_vault( request, metadata ) - pb_request = backupdr.GetManagementServerRequest.pb(request) + pb_request = backupvault.UpdateBackupVaultRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -816,6 +3651,7 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -824,18 +3660,18 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = backupdr.ManagementServer() - pb_resp = backupdr.ManagementServer.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_management_server(resp) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_backup_vault(resp) return resp - class _ListManagementServers(BackupDRRestStub): + class _UpdateDataSource(BackupDRRestStub): def __hash__(self): - return hash("ListManagementServers") + return hash("UpdateDataSource") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } @classmethod def _get_unset_required_fields(cls, message_dict): @@ -847,18 +3683,18 @@ def _get_unset_required_fields(cls, 
message_dict): def __call__( self, - request: backupdr.ListManagementServersRequest, + request: backupvault.UpdateDataSourceRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> backupdr.ListManagementServersResponse: - r"""Call the list management servers method over HTTP. + ) -> operations_pb2.Operation: + r"""Call the update data source method over HTTP. Args: - request (~.backupdr.ListManagementServersRequest): - The request object. Request message for listing - management servers. + request (~.backupvault.UpdateDataSourceRequest): + The request object. Request message for updating a data + source instance. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -866,24 +3702,31 @@ def __call__( sent along with the request as metadata. Returns: - ~.backupdr.ListManagementServersResponse: - Response message for listing - management servers. + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
""" http_options: List[Dict[str, str]] = [ { - "method": "get", - "uri": "/v1/{parent=projects/*/locations/*}/managementServers", + "method": "patch", + "uri": "/v1/{data_source.name=projects/*/locations/*/backupVaults/*/dataSources/*}", + "body": "data_source", }, ] - request, metadata = self._interceptor.pre_list_management_servers( + request, metadata = self._interceptor.pre_update_data_source( request, metadata ) - pb_request = backupdr.ListManagementServersRequest.pb(request) + pb_request = backupvault.UpdateDataSourceRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -906,6 +3749,7 @@ def __call__( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -914,13 +3758,38 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = backupdr.ListManagementServersResponse() - pb_resp = backupdr.ListManagementServersResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_management_servers(resp) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_data_source(resp) return resp + @property + def create_backup_plan( + self, + ) -> Callable[[backupplan.CreateBackupPlanRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateBackupPlan(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.CreateBackupPlanAssociationRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateBackupPlanAssociation(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_backup_vault( + self, + ) -> Callable[[backupvault.CreateBackupVaultRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateBackupVault(self._session, self._host, self._interceptor) # type: ignore + @property def create_management_server( self, @@ -929,6 +3798,41 @@ def create_management_server( # In C++ this would require a dynamic_cast return self._CreateManagementServer(self._session, self._host, self._interceptor) # type: ignore + @property + def delete_backup( + self, + ) -> Callable[[backupvault.DeleteBackupRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_backup_plan( + self, + ) -> Callable[[backupplan.DeleteBackupPlanRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteBackupPlan(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.DeleteBackupPlanAssociationRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteBackupPlanAssociation(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_backup_vault( + self, + ) -> Callable[[backupvault.DeleteBackupVaultRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteBackupVault(self._session, self._host, self._interceptor) # type: ignore + @property def delete_management_server( self, @@ -937,6 +3841,60 @@ def delete_management_server( # In C++ this would require a dynamic_cast return self._DeleteManagementServer(self._session, self._host, self._interceptor) # type: ignore + @property + def fetch_usable_backup_vaults( + self, + ) -> Callable[ + [backupvault.FetchUsableBackupVaultsRequest], + backupvault.FetchUsableBackupVaultsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._FetchUsableBackupVaults(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_backup( + self, + ) -> Callable[[backupvault.GetBackupRequest], backupvault.Backup]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_backup_plan( + self, + ) -> Callable[[backupplan.GetBackupPlanRequest], backupplan.BackupPlan]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetBackupPlan(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.GetBackupPlanAssociationRequest], + backupplanassociation.BackupPlanAssociation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetBackupPlanAssociation(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_backup_vault( + self, + ) -> Callable[[backupvault.GetBackupVaultRequest], backupvault.BackupVault]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetBackupVault(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_data_source( + self, + ) -> Callable[[backupvault.GetDataSourceRequest], backupvault.DataSource]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetDataSource(self._session, self._host, self._interceptor) # type: ignore + @property def get_management_server( self, @@ -945,6 +3903,55 @@ def get_management_server( # In C++ this would require a dynamic_cast return self._GetManagementServer(self._session, self._host, self._interceptor) # type: ignore + @property + def list_backup_plan_associations( + self, + ) -> Callable[ + [backupplanassociation.ListBackupPlanAssociationsRequest], + backupplanassociation.ListBackupPlanAssociationsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListBackupPlanAssociations(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_backup_plans( + self, + ) -> Callable[ + [backupplan.ListBackupPlansRequest], backupplan.ListBackupPlansResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListBackupPlans(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_backups( + self, + ) -> Callable[[backupvault.ListBackupsRequest], backupvault.ListBackupsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListBackups(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_backup_vaults( + self, + ) -> Callable[ + [backupvault.ListBackupVaultsRequest], backupvault.ListBackupVaultsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListBackupVaults(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_data_sources( + self, + ) -> Callable[ + [backupvault.ListDataSourcesRequest], backupvault.ListDataSourcesResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDataSources(self._session, self._host, self._interceptor) # type: ignore + @property def list_management_servers( self, @@ -955,6 +3962,48 @@ def list_management_servers( # In C++ this would require a dynamic_cast return self._ListManagementServers(self._session, self._host, self._interceptor) # type: ignore + @property + def restore_backup( + self, + ) -> Callable[[backupvault.RestoreBackupRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RestoreBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def trigger_backup( + self, + ) -> Callable[ + [backupplanassociation.TriggerBackupRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._TriggerBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_backup( + self, + ) -> Callable[[backupvault.UpdateBackupRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_backup_vault( + self, + ) -> Callable[[backupvault.UpdateBackupVaultRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateBackupVault(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_data_source( + self, + ) -> Callable[[backupvault.UpdateDataSourceRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateDataSource(self._session, self._host, self._interceptor) # type: ignore + @property def get_location(self): return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/__init__.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/__init__.py index 3afc31268ba2..951186d655ee 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/__init__.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/__init__.py @@ -26,6 +26,92 @@ WorkforceIdentityBasedManagementURI, WorkforceIdentityBasedOAuth2ClientID, ) +from .backupplan import ( + BackupPlan, + BackupRule, + BackupWindow, + CreateBackupPlanRequest, + DeleteBackupPlanRequest, + GetBackupPlanRequest, + ListBackupPlansRequest, + ListBackupPlansResponse, + StandardSchedule, + WeekDayOfMonth, +) +from .backupplanassociation import ( + BackupPlanAssociation, + CreateBackupPlanAssociationRequest, + DeleteBackupPlanAssociationRequest, + GetBackupPlanAssociationRequest, + ListBackupPlanAssociationsRequest, + ListBackupPlanAssociationsResponse, + RuleConfigInfo, + TriggerBackupRequest, +) +from 
.backupvault import ( + Backup, + BackupApplianceBackupConfig, + BackupApplianceLockInfo, + BackupConfigInfo, + BackupConfigState, + BackupLock, + BackupVault, + BackupVaultView, + BackupView, + CreateBackupVaultRequest, + DataSource, + DataSourceBackupApplianceApplication, + DataSourceGcpResource, + DeleteBackupRequest, + DeleteBackupVaultRequest, + FetchUsableBackupVaultsRequest, + FetchUsableBackupVaultsResponse, + GcpBackupConfig, + GcpResource, + GetBackupRequest, + GetBackupVaultRequest, + GetDataSourceRequest, + ListBackupsRequest, + ListBackupsResponse, + ListBackupVaultsRequest, + ListBackupVaultsResponse, + ListDataSourcesRequest, + ListDataSourcesResponse, + RestoreBackupRequest, + RestoreBackupResponse, + ServiceLockInfo, + TargetResource, + UpdateBackupRequest, + UpdateBackupVaultRequest, + UpdateDataSourceRequest, +) +from .backupvault_ba import BackupApplianceBackupProperties +from .backupvault_gce import ( + AcceleratorConfig, + AccessConfig, + AdvancedMachineFeatures, + AliasIpRange, + AllocationAffinity, + AttachedDisk, + ComputeInstanceBackupProperties, + ComputeInstanceDataSourceProperties, + ComputeInstanceRestoreProperties, + ComputeInstanceTargetEnvironment, + ConfidentialInstanceConfig, + CustomerEncryptionKey, + DisplayDevice, + Entry, + GuestOsFeature, + InstanceParams, + KeyRevocationActionType, + Metadata, + NetworkInterface, + NetworkPerformanceConfig, + Scheduling, + SchedulingDuration, + ServiceAccount, + Tags, +) __all__ = ( "CreateManagementServerRequest", @@ -39,4 +125,82 @@ "OperationMetadata", "WorkforceIdentityBasedManagementURI", "WorkforceIdentityBasedOAuth2ClientID", + "BackupPlan", + "BackupRule", + "BackupWindow", + "CreateBackupPlanRequest", + "DeleteBackupPlanRequest", + "GetBackupPlanRequest", + "ListBackupPlansRequest", + "ListBackupPlansResponse", + "StandardSchedule", + "WeekDayOfMonth", + "BackupPlanAssociation", + "CreateBackupPlanAssociationRequest", + "DeleteBackupPlanAssociationRequest", + 
"GetBackupPlanAssociationRequest", + "ListBackupPlanAssociationsRequest", + "ListBackupPlanAssociationsResponse", + "RuleConfigInfo", + "TriggerBackupRequest", + "Backup", + "BackupApplianceBackupConfig", + "BackupApplianceLockInfo", + "BackupConfigInfo", + "BackupLock", + "BackupVault", + "CreateBackupVaultRequest", + "DataSource", + "DataSourceBackupApplianceApplication", + "DataSourceGcpResource", + "DeleteBackupRequest", + "DeleteBackupVaultRequest", + "FetchUsableBackupVaultsRequest", + "FetchUsableBackupVaultsResponse", + "GcpBackupConfig", + "GcpResource", + "GetBackupRequest", + "GetBackupVaultRequest", + "GetDataSourceRequest", + "ListBackupsRequest", + "ListBackupsResponse", + "ListBackupVaultsRequest", + "ListBackupVaultsResponse", + "ListDataSourcesRequest", + "ListDataSourcesResponse", + "RestoreBackupRequest", + "RestoreBackupResponse", + "ServiceLockInfo", + "TargetResource", + "UpdateBackupRequest", + "UpdateBackupVaultRequest", + "UpdateDataSourceRequest", + "BackupConfigState", + "BackupVaultView", + "BackupView", + "BackupApplianceBackupProperties", + "AcceleratorConfig", + "AccessConfig", + "AdvancedMachineFeatures", + "AliasIpRange", + "AllocationAffinity", + "AttachedDisk", + "ComputeInstanceBackupProperties", + "ComputeInstanceDataSourceProperties", + "ComputeInstanceRestoreProperties", + "ComputeInstanceTargetEnvironment", + "ConfidentialInstanceConfig", + "CustomerEncryptionKey", + "DisplayDevice", + "Entry", + "GuestOsFeature", + "InstanceParams", + "Metadata", + "NetworkInterface", + "NetworkPerformanceConfig", + "Scheduling", + "SchedulingDuration", + "ServiceAccount", + "Tags", + "KeyRevocationActionType", ) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupdr.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupdr.py index 07ad09a753bc..2cbce001d7d5 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupdr.py +++ 
b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupdr.py @@ -195,7 +195,7 @@ class ManagementServer(proto.Message): oauth2_client_id (str): Output only. The OAuth 2.0 client id is required to make API calls to the BackupDR instance API of this ManagementServer. - This is the value that should be provided in the ‘aud’ field + This is the value that should be provided in the 'aud' field of the OIDC ID Token (see openid specification https://openid.net/specs/openid-connect-core-1_0.html#IDToken). workforce_identity_based_oauth2_client_id (google.cloud.backupdr_v1.types.WorkforceIdentityBasedOAuth2ClientID): @@ -350,10 +350,10 @@ class ListManagementServersRequest(proto.Message): parent (str): Required. The project and location for which to retrieve management servers information, in the format - ``projects/{project_id}/locations/{location}``. In Cloud - BackupDR, locations map to GCP regions, for example + 'projects/{project_id}/locations/{location}'. In Cloud + BackupDR, locations map to Google Cloud regions, for example **us-central1**. To retrieve management servers for all - locations, use "-" for the ``{location}`` value. + locations, use "-" for the '{location}' value. page_size (int): Optional. Requested page size. Server may return fewer items than requested. If @@ -401,15 +401,16 @@ class ListManagementServersResponse(proto.Message): Attributes: management_servers (MutableSequence[google.cloud.backupdr_v1.types.ManagementServer]): - The list of ManagementServer instances in the project for - the specified location. - - If the ``{location}`` value in the request is "-", the - response contains a list of instances from all locations. In - case any location is unreachable, the response will only - return management servers in reachable locations and the - 'unreachable' field will be populated with a list of - unreachable locations. + The list of ManagementServer instances in the + project for the specified location. 
+ + If the '{location}' value in the request is "-", + the response contains a list of instances from + all locations. In case any location is + unreachable, the response will only return + management servers in reachable locations and + the 'unreachable' field will be populated with a + list of unreachable locations. next_page_token (str): A token identifying a page of results the server should return. @@ -443,7 +444,7 @@ class GetManagementServerRequest(proto.Message): name (str): Required. Name of the management server resource name, in the format - ``projects/{project_id}/locations/{location}/managementServers/{resource_name}`` + 'projects/{project_id}/locations/{location}/managementServers/{resource_name}' """ name: str = proto.Field( @@ -458,9 +459,9 @@ class CreateManagementServerRequest(proto.Message): Attributes: parent (str): Required. The management server project and location in the - format ``projects/{project_id}/locations/{location}``. In - Cloud Backup and DR locations map to GCP regions, for - example **us-central1**. + format 'projects/{project_id}/locations/{location}'. In + Cloud Backup and DR locations map to Google Cloud regions, + for example **us-central1**. management_server_id (str): Required. The name of the management server to create. The name must be unique for the @@ -571,7 +572,7 @@ class OperationMetadata(proto.Message): cancellation of the operation. Operations that have successfully been cancelled have [Operation.error][] value with a [google.rpc.Status.code][google.rpc.Status.code] of - 1, corresponding to ``Code.CANCELLED``. + 1, corresponding to 'Code.CANCELLED'. api_version (str): Output only. API version used to start the operation. 
diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupplan.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupplan.py new file mode 100644 index 000000000000..ca0eabb6d9f1 --- /dev/null +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupplan.py @@ -0,0 +1,644 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import month_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.backupdr.v1", + manifest={ + "BackupPlan", + "BackupRule", + "StandardSchedule", + "BackupWindow", + "WeekDayOfMonth", + "CreateBackupPlanRequest", + "ListBackupPlansRequest", + "ListBackupPlansResponse", + "GetBackupPlanRequest", + "DeleteBackupPlanRequest", + }, +) + + +class BackupPlan(proto.Message): + r"""A ``BackupPlan`` specifies some common fields, such as + ``description`` as well as one or more ``BackupRule`` messages. Each + ``BackupRule`` has a retention policy and defines a schedule by + which the system is to perform backup workloads. + + Attributes: + name (str): + Output only. Identifier. The resource name of the + ``BackupPlan``. 
+ + Format: + ``projects/{project}/locations/{location}/backupPlans/{backup_plan}`` + description (str): + Optional. The description of the ``BackupPlan`` resource. + + The description allows for additional details about + ``BackupPlan`` and its use cases to be provided. An example + description is the following: "This is a backup plan that + performs a daily backup at 6pm and retains data for 3 + months". The description must be at most 2048 characters. + labels (MutableMapping[str, str]): + Optional. This collection of key/value pairs + allows for custom labels to be supplied by the + user. Example, {"tag": "Weekly"}. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. When the ``BackupPlan`` was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. When the ``BackupPlan`` was last updated. + backup_rules (MutableSequence[google.cloud.backupdr_v1.types.BackupRule]): + Required. The backup rules for this ``BackupPlan``. There + must be at least one ``BackupRule`` message. + state (google.cloud.backupdr_v1.types.BackupPlan.State): + Output only. The ``State`` for the ``BackupPlan``. + resource_type (str): + Required. The resource type to which the ``BackupPlan`` will + be applied. Examples include, + "compute.googleapis.com/Instance" and + "storage.googleapis.com/Bucket". + etag (str): + Optional. ``etag`` is returned from the service in the + response. As a user of the service, you may provide an etag + value in this field to prevent stale resources. + backup_vault (str): + Required. Resource name of backup vault which + will be used as storage location for backups. + Format: + + projects/{project}/locations/{location}/backupVaults/{backupvault} + backup_vault_service_account (str): + Output only. The Google Cloud Platform + Service Account to be used by the BackupVault + for taking backups. Specify the email address of + the Backup Vault Service Account. 
+ """ + + class State(proto.Enum): + r"""``State`` enumerates the possible states for a ``BackupPlan``. + + Values: + STATE_UNSPECIFIED (0): + State not set. + CREATING (1): + The resource is being created. + ACTIVE (2): + The resource has been created and is fully + usable. + DELETING (3): + The resource is being deleted. + INACTIVE (4): + The resource has been created but is not + usable. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + ACTIVE = 2 + DELETING = 3 + INACTIVE = 4 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + backup_rules: MutableSequence["BackupRule"] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message="BackupRule", + ) + state: State = proto.Field( + proto.ENUM, + number=7, + enum=State, + ) + resource_type: str = proto.Field( + proto.STRING, + number=8, + ) + etag: str = proto.Field( + proto.STRING, + number=9, + ) + backup_vault: str = proto.Field( + proto.STRING, + number=10, + ) + backup_vault_service_account: str = proto.Field( + proto.STRING, + number=11, + ) + + +class BackupRule(proto.Message): + r"""``BackupRule`` binds the backup schedule to a retention policy. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + rule_id (str): + Required. Immutable. The unique id of this ``BackupRule``. + The ``rule_id`` is unique per ``BackupPlan``.The ``rule_id`` + must start with a lowercase letter followed by up to 62 + lowercase letters, numbers, or hyphens. Pattern, + /[a-z][a-z0-9-]{,62}/. + backup_retention_days (int): + Required. 
Configures the duration for which + backup data will be kept. It is defined in + “days”. The value should be greater than or + equal to minimum enforced retention of the + backup vault. + standard_schedule (google.cloud.backupdr_v1.types.StandardSchedule): + Required. Defines a schedule that runs within + the confines of a defined window of time. + + This field is a member of `oneof`_ ``backup_schedule_oneof``. + """ + + rule_id: str = proto.Field( + proto.STRING, + number=1, + ) + backup_retention_days: int = proto.Field( + proto.INT32, + number=4, + ) + standard_schedule: "StandardSchedule" = proto.Field( + proto.MESSAGE, + number=5, + oneof="backup_schedule_oneof", + message="StandardSchedule", + ) + + +class StandardSchedule(proto.Message): + r"""``StandardSchedule`` defines a schedule that run within the confines + of a defined window of days. We can define recurrence type for + schedule as HOURLY, DAILY, WEEKLY, MONTHLY or YEARLY. + + Attributes: + recurrence_type (google.cloud.backupdr_v1.types.StandardSchedule.RecurrenceType): + Required. Specifies the ``RecurrenceType`` for the schedule. + hourly_frequency (int): + Optional. Specifies frequency for hourly backups. A hourly + frequency of 2 means jobs will run every 2 hours from start + time till end time defined. + + This is required for ``recurrence_type``, ``HOURLY`` and is + not applicable otherwise. A validation error will occur if a + value is supplied and ``recurrence_type`` is not ``HOURLY``. + + Value of hourly frequency should be between 6 and 23. + + Reason for limit : We found that there is bandwidth + limitation of 3GB/S for GMI while taking a backup and 5GB/S + while doing a restore. Given the amount of parallel backups + and restore we are targeting, this will potentially take the + backup time to mins and hours (in worst case scenario). + days_of_week (MutableSequence[google.type.dayofweek_pb2.DayOfWeek]): + Optional. Specifies days of week like, MONDAY or TUESDAY, on + which jobs will run. 
+ + This is required for ``recurrence_type``, ``WEEKLY`` and is + not applicable otherwise. A validation error will occur if a + value is supplied and ``recurrence_type`` is not ``WEEKLY``. + days_of_month (MutableSequence[int]): + Optional. Specifies days of months like 1, 5, or 14 on which + jobs will run. + + Values for ``days_of_month`` are only applicable for + ``recurrence_type``, ``MONTHLY`` and ``YEARLY``. A + validation error will occur if other values are supplied. + week_day_of_month (google.cloud.backupdr_v1.types.WeekDayOfMonth): + Optional. Specifies a week day of the month like, FIRST + SUNDAY or LAST MONDAY, on which jobs will run. This will be + specified by two fields in ``WeekDayOfMonth``, one for the + day, e.g. ``MONDAY``, and one for the week, e.g. ``LAST``. + + This field is only applicable for ``recurrence_type``, + ``MONTHLY`` and ``YEARLY``. A validation error will occur if + other values are supplied. + months (MutableSequence[google.type.month_pb2.Month]): + Optional. Specifies the months of year, like ``FEBRUARY`` + and/or ``MAY``, on which jobs will run. + + This field is only applicable when ``recurrence_type`` is + ``YEARLY``. A validation error will occur if other values + are supplied. + backup_window (google.cloud.backupdr_v1.types.BackupWindow): + Required. A BackupWindow defines the window of day during + which backup jobs will run. Jobs are queued at the beginning + of the window and will be marked as ``NOT_RUN`` if they do + not start by the end of the window. + + Note: running jobs will not be cancelled at the end of the + window. + time_zone (str): + Required. The time zone to be used when interpreting the + schedule. The value of this field must be a time zone name + from the IANA tz database. See + https://en.wikipedia.org/wiki/List_of_tz_database_time_zones + for the list of valid timezone names. For e.g., + Europe/Paris. 
+ """ + + class RecurrenceType(proto.Enum): + r"""``RecurrenceTypes`` enumerates the applicable periodicity for the + schedule. + + Values: + RECURRENCE_TYPE_UNSPECIFIED (0): + recurrence type not set + HOURLY (1): + The ``BackupRule`` is to be applied hourly. + DAILY (2): + The ``BackupRule`` is to be applied daily. + WEEKLY (3): + The ``BackupRule`` is to be applied weekly. + MONTHLY (4): + The ``BackupRule`` is to be applied monthly. + YEARLY (5): + The ``BackupRule`` is to be applied yearly. + """ + RECURRENCE_TYPE_UNSPECIFIED = 0 + HOURLY = 1 + DAILY = 2 + WEEKLY = 3 + MONTHLY = 4 + YEARLY = 5 + + recurrence_type: RecurrenceType = proto.Field( + proto.ENUM, + number=1, + enum=RecurrenceType, + ) + hourly_frequency: int = proto.Field( + proto.INT32, + number=2, + ) + days_of_week: MutableSequence[dayofweek_pb2.DayOfWeek] = proto.RepeatedField( + proto.ENUM, + number=3, + enum=dayofweek_pb2.DayOfWeek, + ) + days_of_month: MutableSequence[int] = proto.RepeatedField( + proto.INT32, + number=4, + ) + week_day_of_month: "WeekDayOfMonth" = proto.Field( + proto.MESSAGE, + number=5, + message="WeekDayOfMonth", + ) + months: MutableSequence[month_pb2.Month] = proto.RepeatedField( + proto.ENUM, + number=6, + enum=month_pb2.Month, + ) + backup_window: "BackupWindow" = proto.Field( + proto.MESSAGE, + number=7, + message="BackupWindow", + ) + time_zone: str = proto.Field( + proto.STRING, + number=8, + ) + + +class BackupWindow(proto.Message): + r"""``BackupWindow`` defines a window of the day during which backup + jobs will run. + + Attributes: + start_hour_of_day (int): + Required. The hour of day (0-23) when the + window starts for e.g. if value of start hour of + day is 6 that mean backup window start at 6:00. + end_hour_of_day (int): + Required. The hour of day (1-24) when the window end for + e.g. if value of end hour of day is 10 that mean backup + window end time is 10:00. + + End hour of day should be greater than start hour of day. 
0 + <= start_hour_of_day < end_hour_of_day <= 24 + + End hour of day is not include in backup window that mean if + end_hour_of_day= 10 jobs should start before 10:00. + """ + + start_hour_of_day: int = proto.Field( + proto.INT32, + number=1, + ) + end_hour_of_day: int = proto.Field( + proto.INT32, + number=2, + ) + + +class WeekDayOfMonth(proto.Message): + r"""``WeekDayOfMonth`` defines the week day of the month on which the + backups will run. The message combines a ``WeekOfMonth`` and + ``DayOfWeek`` to produce values like ``FIRST``/``MONDAY`` or + ``LAST``/``FRIDAY``. + + Attributes: + week_of_month (google.cloud.backupdr_v1.types.WeekDayOfMonth.WeekOfMonth): + Required. Specifies the week of the month. + day_of_week (google.type.dayofweek_pb2.DayOfWeek): + Required. Specifies the day of the week. + """ + + class WeekOfMonth(proto.Enum): + r"""``WeekOfMonth`` enumerates possible weeks in the month, e.g. the + first, third, or last week of the month. + + Values: + WEEK_OF_MONTH_UNSPECIFIED (0): + The zero value. Do not use. + FIRST (1): + The first week of the month. + SECOND (2): + The second week of the month. + THIRD (3): + The third week of the month. + FOURTH (4): + The fourth week of the month. + LAST (5): + The last week of the month. + """ + WEEK_OF_MONTH_UNSPECIFIED = 0 + FIRST = 1 + SECOND = 2 + THIRD = 3 + FOURTH = 4 + LAST = 5 + + week_of_month: WeekOfMonth = proto.Field( + proto.ENUM, + number=1, + enum=WeekOfMonth, + ) + day_of_week: dayofweek_pb2.DayOfWeek = proto.Field( + proto.ENUM, + number=2, + enum=dayofweek_pb2.DayOfWeek, + ) + + +class CreateBackupPlanRequest(proto.Message): + r"""The request message for creating a ``BackupPlan``. + + Attributes: + parent (str): + Required. The ``BackupPlan`` project and location in the + format ``projects/{project}/locations/{location}``. In Cloud + BackupDR locations map to GCP regions, for example + **us-central1**. + backup_plan_id (str): + Required. The name of the ``BackupPlan`` to create. 
The name + must be unique for the specified project and location.The + name must start with a lowercase letter followed by up to 62 + lowercase letters, numbers, or hyphens. Pattern, + /[a-z][a-z0-9-]{,62}/. + backup_plan (google.cloud.backupdr_v1.types.BackupPlan): + Required. The ``BackupPlan`` resource object to create. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and t he request times out. + If you make the request again with the same + request ID, the server can check if original + operation with the same request ID was received, + and if so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + backup_plan_id: str = proto.Field( + proto.STRING, + number=2, + ) + backup_plan: "BackupPlan" = proto.Field( + proto.MESSAGE, + number=3, + message="BackupPlan", + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListBackupPlansRequest(proto.Message): + r"""The request message for getting a list ``BackupPlan``. + + Attributes: + parent (str): + Required. The project and location for which to retrieve + ``BackupPlans`` information. Format: + ``projects/{project}/locations/{location}``. In Cloud + BackupDR, locations map to GCP regions, for e.g. + **us-central1**. To retrieve backup plans for all locations, + use "-" for the ``{location}`` value. + page_size (int): + Optional. The maximum number of ``BackupPlans`` to return in + a single response. 
If not specified, a default value will be + chosen by the service. Note that the response may include a + partial list and a caller should only rely on the response's + [next_page_token][google.cloud.backupdr.v1.ListBackupPlansResponse.next_page_token] + to determine if there are more instances left to be queried. + page_token (str): + Optional. The value of + [next_page_token][google.cloud.backupdr.v1.ListBackupPlansResponse.next_page_token] + received from a previous ``ListBackupPlans`` call. Provide + this to retrieve the subsequent page in a multi-page list of + results. When paginating, all other parameters provided to + ``ListBackupPlans`` must match the call that provided the + page token. + filter (str): + Optional. Field match expression used to + filter the results. + order_by (str): + Optional. Field by which to sort the results. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListBackupPlansResponse(proto.Message): + r"""The response message for getting a list of ``BackupPlan``. + + Attributes: + backup_plans (MutableSequence[google.cloud.backupdr_v1.types.BackupPlan]): + The list of ``BackupPlans`` in the project for the specified + location. + + If the ``{location}`` value in the request is "-", the + response contains a list of resources from all locations. In + case any location is unreachable, the response will only + return backup plans in reachable locations and the + 'unreachable' field will be populated with a list of + unreachable locations. BackupPlan + next_page_token (str): + A token which may be sent as + [page_token][google.cloud.backupdr.v1.ListBackupPlansRequest.page_token] + in a subsequent ``ListBackupPlans`` call to retrieve the + next page of results. 
If this field is omitted or empty, + then there are no more results to return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + backup_plans: MutableSequence["BackupPlan"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="BackupPlan", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetBackupPlanRequest(proto.Message): + r"""The request message for getting a ``BackupPlan``. + + Attributes: + name (str): + Required. The resource name of the ``BackupPlan`` to + retrieve. + + Format: + ``projects/{project}/locations/{location}/backupPlans/{backup_plan}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteBackupPlanRequest(proto.Message): + r"""The request message for deleting a ``BackupPlan``. + + Attributes: + name (str): + Required. The resource name of the ``BackupPlan`` to delete. + + Format: + ``projects/{project}/locations/{location}/backupPlans/{backup_plan}`` + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupplanassociation.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupplanassociation.py new file mode 100644 index 000000000000..23a4309a3fd8 --- /dev/null +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupplanassociation.py @@ -0,0 +1,454 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.backupdr.v1", + manifest={ + "BackupPlanAssociation", + "RuleConfigInfo", + "CreateBackupPlanAssociationRequest", + "ListBackupPlanAssociationsRequest", + "ListBackupPlanAssociationsResponse", + "GetBackupPlanAssociationRequest", + "DeleteBackupPlanAssociationRequest", + "TriggerBackupRequest", + }, +) + + +class BackupPlanAssociation(proto.Message): + r"""A BackupPlanAssociation represents a single + BackupPlanAssociation which contains details like workload, + backup plan etc + + Attributes: + name (str): + Output only. Identifier. 
The resource name of + BackupPlanAssociation in below format Format : + + projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId} + resource_type (str): + Optional. Resource type of workload on which + backupplan is applied + resource (str): + Required. Immutable. Resource name of + workload on which backupplan is applied + backup_plan (str): + Required. Resource name of backup plan which + needs to be applied on workload. Format: + + projects/{project}/locations/{location}/backupPlans/{backupPlanId} + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the instance was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the instance was + updated. + state (google.cloud.backupdr_v1.types.BackupPlanAssociation.State): + Output only. The BackupPlanAssociation + resource state. + rules_config_info (MutableSequence[google.cloud.backupdr_v1.types.RuleConfigInfo]): + Output only. The config info related to + backup rules. + data_source (str): + Output only. Output Only. + + Resource name of data source which will be used + as storage location for backups taken. + Format : + + projects/{project}/locations/{location}/backupVaults/{backupvault}/dataSources/{datasource} + """ + + class State(proto.Enum): + r"""Enum for State of BackupPlan Association + + Values: + STATE_UNSPECIFIED (0): + State not set. + CREATING (1): + The resource is being created. + ACTIVE (2): + The resource has been created and is fully + usable. + DELETING (3): + The resource is being deleted. + INACTIVE (4): + The resource has been created but is not + usable. 
+ """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + ACTIVE = 2 + DELETING = 3 + INACTIVE = 4 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + resource_type: str = proto.Field( + proto.STRING, + number=2, + ) + resource: str = proto.Field( + proto.STRING, + number=3, + ) + backup_plan: str = proto.Field( + proto.STRING, + number=4, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + state: State = proto.Field( + proto.ENUM, + number=7, + enum=State, + ) + rules_config_info: MutableSequence["RuleConfigInfo"] = proto.RepeatedField( + proto.MESSAGE, + number=8, + message="RuleConfigInfo", + ) + data_source: str = proto.Field( + proto.STRING, + number=9, + ) + + +class RuleConfigInfo(proto.Message): + r"""Message for rules config info. + + Attributes: + rule_id (str): + Output only. Output Only. + + Backup Rule id fetched from backup plan. + last_backup_state (google.cloud.backupdr_v1.types.RuleConfigInfo.LastBackupState): + Output only. The last backup state for rule. + last_backup_error (google.rpc.status_pb2.Status): + Output only. Output Only. + + google.rpc.Status object to store the last + backup error. + last_successful_backup_consistency_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The point in time when the last + successful backup was captured from the source. + """ + + class LastBackupState(proto.Enum): + r"""Enum for LastBackupState + + Values: + LAST_BACKUP_STATE_UNSPECIFIED (0): + State not set. + FIRST_BACKUP_PENDING (1): + The first backup is pending. + PERMISSION_DENIED (2): + The most recent backup could not be + run/failed because of the lack of permissions. + SUCCEEDED (3): + The last backup operation succeeded. + FAILED (4): + The last backup operation failed. 
+ """ + LAST_BACKUP_STATE_UNSPECIFIED = 0 + FIRST_BACKUP_PENDING = 1 + PERMISSION_DENIED = 2 + SUCCEEDED = 3 + FAILED = 4 + + rule_id: str = proto.Field( + proto.STRING, + number=1, + ) + last_backup_state: LastBackupState = proto.Field( + proto.ENUM, + number=3, + enum=LastBackupState, + ) + last_backup_error: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=4, + message=status_pb2.Status, + ) + last_successful_backup_consistency_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + + +class CreateBackupPlanAssociationRequest(proto.Message): + r"""Request message for creating a backup plan. + + Attributes: + parent (str): + Required. The backup plan association project and location + in the format + ``projects/{project_id}/locations/{location}``. In Cloud + BackupDR locations map to GCP regions, for example + **us-central1**. + backup_plan_association_id (str): + Required. The name of the backup plan + association to create. The name must be unique + for the specified project and location. + backup_plan_association (google.cloud.backupdr_v1.types.BackupPlanAssociation): + Required. The resource being created + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and t he request times out. + If you make the request again with the same + request ID, the server can check if original + operation with the same request ID was received, + and if so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. 
+ + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + backup_plan_association_id: str = proto.Field( + proto.STRING, + number=2, + ) + backup_plan_association: "BackupPlanAssociation" = proto.Field( + proto.MESSAGE, + number=3, + message="BackupPlanAssociation", + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListBackupPlanAssociationsRequest(proto.Message): + r"""Request message for List BackupPlanAssociation + + Attributes: + parent (str): + Required. The project and location for which to retrieve + backup Plan Associations information, in the format + ``projects/{project_id}/locations/{location}``. In Cloud + BackupDR, locations map to GCP regions, for example + **us-central1**. To retrieve backup plan associations for + all locations, use "-" for the ``{location}`` value. + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. + page_token (str): + Optional. A token identifying a page of + results the server should return. + filter (str): + Optional. Filtering results + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListBackupPlanAssociationsResponse(proto.Message): + r"""Response message for List BackupPlanAssociation + + Attributes: + backup_plan_associations (MutableSequence[google.cloud.backupdr_v1.types.BackupPlanAssociation]): + The list of Backup Plan Associations in the project for the + specified location. + + If the ``{location}`` value in the request is "-", the + response contains a list of instances from all locations. 
In + case any location is unreachable, the response will only + return backup plan associations in reachable locations and + the 'unreachable' field will be populated with a list of + unreachable locations. + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + backup_plan_associations: MutableSequence[ + "BackupPlanAssociation" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="BackupPlanAssociation", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetBackupPlanAssociationRequest(proto.Message): + r"""Request message for getting a BackupPlanAssociation resource. + + Attributes: + name (str): + Required. Name of the backup plan association resource, in + the format + ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteBackupPlanAssociationRequest(proto.Message): + r"""Request message for deleting a backup plan association. + + Attributes: + name (str): + Required. Name of the backup plan association resource, in + the format + ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`` + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. 
If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class TriggerBackupRequest(proto.Message): + r"""Request message for triggering a backup. + + Attributes: + name (str): + Required. Name of the backup plan association resource, in + the format + ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`` + rule_id (str): + Required. backup rule_id for which a backup needs to be + triggered. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + rule_id: str = proto.Field( + proto.STRING, + number=2, + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault.py new file mode 100644 index 000000000000..ced3cd195702 --- /dev/null +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault.py @@ -0,0 +1,2065 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.backupdr_v1.types import backupvault_ba, backupvault_gce + +__protobuf__ = proto.module( + package="google.cloud.backupdr.v1", + manifest={ + "BackupConfigState", + "BackupView", + "BackupVaultView", + "BackupVault", + "DataSource", + "BackupConfigInfo", + "GcpBackupConfig", + "BackupApplianceBackupConfig", + "DataSourceGcpResource", + "DataSourceBackupApplianceApplication", + "ServiceLockInfo", + "BackupApplianceLockInfo", + "BackupLock", + "Backup", + "CreateBackupVaultRequest", + "ListBackupVaultsRequest", + "ListBackupVaultsResponse", + "FetchUsableBackupVaultsRequest", + "FetchUsableBackupVaultsResponse", + "GetBackupVaultRequest", + "UpdateBackupVaultRequest", + "DeleteBackupVaultRequest", + "ListDataSourcesRequest", + "ListDataSourcesResponse", + "GetDataSourceRequest", + "UpdateDataSourceRequest", + "ListBackupsRequest", + "ListBackupsResponse", + "GetBackupRequest", + "UpdateBackupRequest", + "DeleteBackupRequest", + "RestoreBackupRequest", + "RestoreBackupResponse", + "TargetResource", + "GcpResource", + }, +) + + +class BackupConfigState(proto.Enum): + r"""Backup configuration state. Is the resource configured for + backup? + + Values: + BACKUP_CONFIG_STATE_UNSPECIFIED (0): + The possible states of backup configuration. + Status not set. + ACTIVE (1): + The data source is actively protected (i.e. 
+ there is a BackupPlanAssociation or Appliance + SLA pointing to it) + PASSIVE (2): + The data source is no longer protected (but + may have backups under it) + """ + BACKUP_CONFIG_STATE_UNSPECIFIED = 0 + ACTIVE = 1 + PASSIVE = 2 + + +class BackupView(proto.Enum): + r"""BackupView contains enum options for Partial and Full view. + + Values: + BACKUP_VIEW_UNSPECIFIED (0): + If the value is not set, the default 'FULL' + view is used. + BACKUP_VIEW_BASIC (1): + Includes basic data about the Backup, but not + the full contents. + BACKUP_VIEW_FULL (2): + Includes all data about the Backup. + This is the default value (for both ListBackups + and GetBackup). + """ + BACKUP_VIEW_UNSPECIFIED = 0 + BACKUP_VIEW_BASIC = 1 + BACKUP_VIEW_FULL = 2 + + +class BackupVaultView(proto.Enum): + r"""BackupVaultView contains enum options for Partial and Full + view. + + Values: + BACKUP_VAULT_VIEW_UNSPECIFIED (0): + If the value is not set, the default 'FULL' + view is used. + BACKUP_VAULT_VIEW_BASIC (1): + Includes basic data about the Backup Vault, + but not the full contents. + BACKUP_VAULT_VIEW_FULL (2): + Includes all data about the Backup Vault. + This is the default value (for both + ListBackupVaults and GetBackupVault). + """ + BACKUP_VAULT_VIEW_UNSPECIFIED = 0 + BACKUP_VAULT_VIEW_BASIC = 1 + BACKUP_VAULT_VIEW_FULL = 2 + + +class BackupVault(proto.Message): + r"""Message describing a BackupVault object. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Output only. Identifier. Name of the backup vault to create. + It must have the + format\ ``"projects/{project}/locations/{location}/backupVaults/{backupvault}"``. + ``{backupvault}`` cannot be changed after creation. It must + be between 3-63 characters long and must be unique within + the project and location. + description (str): + Optional. The description of the BackupVault + instance (2048 characters or less). 
+ + This field is a member of `oneof`_ ``_description``. + labels (MutableMapping[str, str]): + Optional. Resource labels to represent user + provided metadata. No labels currently defined: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the instance was + created. + + This field is a member of `oneof`_ ``_create_time``. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the instance was + updated. + + This field is a member of `oneof`_ ``_update_time``. + backup_minimum_enforced_retention_duration (google.protobuf.duration_pb2.Duration): + Required. The default and minimum enforced + retention for each backup within the backup + vault. The enforced retention for each backup + can be extended. + + This field is a member of `oneof`_ ``_backup_minimum_enforced_retention_duration``. + deletable (bool): + Output only. Set to true when there are no + backups nested under this resource. + + This field is a member of `oneof`_ ``_deletable``. + etag (str): + Optional. Server specified ETag for the + backup vault resource to prevent simultaneous + updates from overwiting each other. + + This field is a member of `oneof`_ ``_etag``. + state (google.cloud.backupdr_v1.types.BackupVault.State): + Output only. The BackupVault resource + instance state. + effective_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. Time after which the BackupVault + resource is locked. + + This field is a member of `oneof`_ ``_effective_time``. + backup_count (int): + Output only. The number of backups in this + backup vault. + service_account (str): + Output only. Service account used by the + BackupVault Service for this BackupVault. The + user should grant this account permissions in + their workload project to enable the service to + run backups and restores there. + total_stored_bytes (int): + Output only. Total size of the storage used + by all backup resources. + uid (str): + Output only. 
Output only + Immutable after resource creation until resource + deletion. + annotations (MutableMapping[str, str]): + Optional. User annotations. See + https://google.aip.dev/128#annotations Stores + small amounts of arbitrary data. + access_restriction (google.cloud.backupdr_v1.types.BackupVault.AccessRestriction): + Optional. Note: This field is added for future use case and + will not be supported in the current release. + + Optional. + + Access restriction for the backup vault. Default value is + WITHIN_ORGANIZATION if not provided during creation. + """ + + class State(proto.Enum): + r"""Holds the state of the backup vault resource. + + Values: + STATE_UNSPECIFIED (0): + State not set. + CREATING (1): + The backup vault is being created. + ACTIVE (2): + The backup vault has been created and is + fully usable. + DELETING (3): + The backup vault is being deleted. + ERROR (4): + The backup vault is experiencing an issue and + might be unusable. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + ACTIVE = 2 + DELETING = 3 + ERROR = 4 + + class AccessRestriction(proto.Enum): + r"""Holds the access restriction for the backup vault. + + Values: + ACCESS_RESTRICTION_UNSPECIFIED (0): + Access restriction not set. + WITHIN_PROJECT (1): + Access to or from resources outside your + current project will be denied. + WITHIN_ORGANIZATION (2): + Access to or from resources outside your + current organization will be denied. + UNRESTRICTED (3): + No access restriction. 
+ """ + ACCESS_RESTRICTION_UNSPECIFIED = 0 + WITHIN_PROJECT = 1 + WITHIN_ORGANIZATION = 2 + UNRESTRICTED = 3 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + optional=True, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + optional=True, + message=timestamp_pb2.Timestamp, + ) + backup_minimum_enforced_retention_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=20, + optional=True, + message=duration_pb2.Duration, + ) + deletable: bool = proto.Field( + proto.BOOL, + number=8, + optional=True, + ) + etag: str = proto.Field( + proto.STRING, + number=9, + optional=True, + ) + state: State = proto.Field( + proto.ENUM, + number=10, + enum=State, + ) + effective_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=12, + optional=True, + message=timestamp_pb2.Timestamp, + ) + backup_count: int = proto.Field( + proto.INT64, + number=17, + ) + service_account: str = proto.Field( + proto.STRING, + number=18, + ) + total_stored_bytes: int = proto.Field( + proto.INT64, + number=19, + ) + uid: str = proto.Field( + proto.STRING, + number=21, + ) + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=22, + ) + access_restriction: AccessRestriction = proto.Field( + proto.ENUM, + number=24, + enum=AccessRestriction, + ) + + +class DataSource(proto.Message): + r"""Message describing a DataSource object. + Datasource object used to represent Datasource details for both + admin and basic view. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. 
+ Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Output only. Identifier. Name of the datasource to create. + It must have the + format\ ``"projects/{project}/locations/{location}/backupVaults/{backupvault}/dataSources/{datasource}"``. + ``{datasource}`` cannot be changed after creation. It must + be between 3-63 characters long and must be unique within + the backup vault. + state (google.cloud.backupdr_v1.types.DataSource.State): + Output only. The DataSource resource instance + state. + labels (MutableMapping[str, str]): + Optional. Resource labels to represent user + provided metadata. No labels currently defined: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the instance was + created. + + This field is a member of `oneof`_ ``_create_time``. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the instance was + updated. + + This field is a member of `oneof`_ ``_update_time``. + backup_count (int): + Number of backups in the data source. + + This field is a member of `oneof`_ ``_backup_count``. + etag (str): + Server specified ETag for the + ManagementServer resource to prevent + simultaneous updates from overwiting each other. + + This field is a member of `oneof`_ ``_etag``. + total_stored_bytes (int): + The number of bytes (metadata and data) + stored in this datasource. + + This field is a member of `oneof`_ ``_total_stored_bytes``. + config_state (google.cloud.backupdr_v1.types.BackupConfigState): + Output only. The backup configuration state. + backup_config_info (google.cloud.backupdr_v1.types.BackupConfigInfo): + Output only. Details of how the resource is + configured for backup. 
+ data_source_gcp_resource (google.cloud.backupdr_v1.types.DataSourceGcpResource): + The backed up resource is a Google Cloud + resource. The word 'DataSource' was included in + the names to indicate that this is the + representation of the Google Cloud resource used + within the DataSource object. + + This field is a member of `oneof`_ ``source_resource``. + data_source_backup_appliance_application (google.cloud.backupdr_v1.types.DataSourceBackupApplianceApplication): + The backed up resource is a backup appliance + application. + + This field is a member of `oneof`_ ``source_resource``. + """ + + class State(proto.Enum): + r"""Holds the state of the data source resource. + + Values: + STATE_UNSPECIFIED (0): + State not set. + CREATING (1): + The data source is being created. + ACTIVE (2): + The data source has been created and is fully + usable. + DELETING (3): + The data source is being deleted. + ERROR (4): + The data source is experiencing an issue and + might be unusable. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + ACTIVE = 2 + DELETING = 3 + ERROR = 4 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + state: State = proto.Field( + proto.ENUM, + number=21, + enum=State, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + optional=True, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + optional=True, + message=timestamp_pb2.Timestamp, + ) + backup_count: int = proto.Field( + proto.INT64, + number=7, + optional=True, + ) + etag: str = proto.Field( + proto.STRING, + number=14, + optional=True, + ) + total_stored_bytes: int = proto.Field( + proto.INT64, + number=23, + optional=True, + ) + config_state: "BackupConfigState" = proto.Field( + proto.ENUM, + number=24, + enum="BackupConfigState", + ) + backup_config_info: "BackupConfigInfo" = 
proto.Field( + proto.MESSAGE, + number=25, + message="BackupConfigInfo", + ) + data_source_gcp_resource: "DataSourceGcpResource" = proto.Field( + proto.MESSAGE, + number=26, + oneof="source_resource", + message="DataSourceGcpResource", + ) + data_source_backup_appliance_application: "DataSourceBackupApplianceApplication" = ( + proto.Field( + proto.MESSAGE, + number=27, + oneof="source_resource", + message="DataSourceBackupApplianceApplication", + ) + ) + + +class BackupConfigInfo(proto.Message): + r"""BackupConfigInfo has information about how the resource is + configured for Backup and about the most recent backup to this + vault. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + last_backup_state (google.cloud.backupdr_v1.types.BackupConfigInfo.LastBackupState): + Output only. The status of the last backup to + this BackupVault + last_successful_backup_consistency_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. If the last backup were + successful, this field has the consistency date. + last_backup_error (google.rpc.status_pb2.Status): + Output only. If the last backup failed, this + field has the error message. + gcp_backup_config (google.cloud.backupdr_v1.types.GcpBackupConfig): + Configuration for a Google Cloud resource. + + This field is a member of `oneof`_ ``backup_config``. + backup_appliance_backup_config (google.cloud.backupdr_v1.types.BackupApplianceBackupConfig): + Configuration for an application backed up by + a Backup Appliance. + + This field is a member of `oneof`_ ``backup_config``. 
+ """ + + class LastBackupState(proto.Enum): + r"""LastBackupstate tracks whether the last backup was not yet + started, successful, failed, or could not be run because of the + lack of permissions. + + Values: + LAST_BACKUP_STATE_UNSPECIFIED (0): + Status not set. + FIRST_BACKUP_PENDING (1): + The first backup has not yet completed + SUCCEEDED (2): + The most recent backup was successful + FAILED (3): + The most recent backup failed + PERMISSION_DENIED (4): + The most recent backup could not be + run/failed because of the lack of permissions + """ + LAST_BACKUP_STATE_UNSPECIFIED = 0 + FIRST_BACKUP_PENDING = 1 + SUCCEEDED = 2 + FAILED = 3 + PERMISSION_DENIED = 4 + + last_backup_state: LastBackupState = proto.Field( + proto.ENUM, + number=1, + enum=LastBackupState, + ) + last_successful_backup_consistency_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + last_backup_error: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=3, + message=status_pb2.Status, + ) + gcp_backup_config: "GcpBackupConfig" = proto.Field( + proto.MESSAGE, + number=4, + oneof="backup_config", + message="GcpBackupConfig", + ) + backup_appliance_backup_config: "BackupApplianceBackupConfig" = proto.Field( + proto.MESSAGE, + number=5, + oneof="backup_config", + message="BackupApplianceBackupConfig", + ) + + +class GcpBackupConfig(proto.Message): + r"""GcpBackupConfig captures the Backup configuration details for + Google Cloud resources. All Google Cloud resources regardless of + type are protected with backup plan associations. + + Attributes: + backup_plan (str): + The name of the backup plan. + backup_plan_description (str): + The description of the backup plan. + backup_plan_association (str): + The name of the backup plan association. 
+ backup_plan_rules (MutableSequence[str]): + The names of the backup plan rules which + point to this backupvault + """ + + backup_plan: str = proto.Field( + proto.STRING, + number=1, + ) + backup_plan_description: str = proto.Field( + proto.STRING, + number=2, + ) + backup_plan_association: str = proto.Field( + proto.STRING, + number=3, + ) + backup_plan_rules: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + + +class BackupApplianceBackupConfig(proto.Message): + r"""BackupApplianceBackupConfig captures the backup configuration + for applications that are protected by Backup Appliances. + + Attributes: + backup_appliance_name (str): + The name of the backup appliance. + backup_appliance_id (int): + The ID of the backup appliance. + sla_id (int): + The ID of the SLA of this application. + application_name (str): + The name of the application. + host_name (str): + The name of the host where the application is + running. + slt_name (str): + The name of the SLT associated with the + application. + slp_name (str): + The name of the SLP associated with the + application. + """ + + backup_appliance_name: str = proto.Field( + proto.STRING, + number=1, + ) + backup_appliance_id: int = proto.Field( + proto.INT64, + number=2, + ) + sla_id: int = proto.Field( + proto.INT64, + number=3, + ) + application_name: str = proto.Field( + proto.STRING, + number=4, + ) + host_name: str = proto.Field( + proto.STRING, + number=5, + ) + slt_name: str = proto.Field( + proto.STRING, + number=6, + ) + slp_name: str = proto.Field( + proto.STRING, + number=7, + ) + + +class DataSourceGcpResource(proto.Message): + r"""DataSourceGcpResource is used for protected resources that + are Google Cloud Resources. This name is easeier to understand + than GcpResourceDataSource or GcpDataSourceResource + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + gcp_resourcename (str): + Output only. 
Full resource pathname URL of + the source Google Cloud resource. + location (str): + Location of the resource: + //"global"/"unspecified". + type_ (str): + The type of the Google Cloud resource. Use + the Unified Resource Type, eg. + compute.googleapis.com/Instance. + compute_instance_datasource_properties (google.cloud.backupdr_v1.types.ComputeInstanceDataSourceProperties): + ComputeInstanceDataSourceProperties has a + subset of Compute Instance properties that are + useful at the Datasource level. + + This field is a member of `oneof`_ ``gcp_resource_properties``. + """ + + gcp_resourcename: str = proto.Field( + proto.STRING, + number=1, + ) + location: str = proto.Field( + proto.STRING, + number=2, + ) + type_: str = proto.Field( + proto.STRING, + number=3, + ) + compute_instance_datasource_properties: backupvault_gce.ComputeInstanceDataSourceProperties = proto.Field( + proto.MESSAGE, + number=4, + oneof="gcp_resource_properties", + message=backupvault_gce.ComputeInstanceDataSourceProperties, + ) + + +class DataSourceBackupApplianceApplication(proto.Message): + r"""BackupApplianceApplication describes a Source Resource when + it is an application backed up by a BackupAppliance. + + Attributes: + application_name (str): + The name of the Application as known to the + Backup Appliance. + backup_appliance (str): + Appliance name. + appliance_id (int): + Appliance Id of the Backup Appliance. + type_ (str): + The type of the application. e.g. VMBackup + application_id (int): + The appid field of the application within the + Backup Appliance. + hostname (str): + Hostname of the host where the application is + running. + host_id (int): + Hostid of the application host. 
+ """ + + application_name: str = proto.Field( + proto.STRING, + number=1, + ) + backup_appliance: str = proto.Field( + proto.STRING, + number=2, + ) + appliance_id: int = proto.Field( + proto.INT64, + number=3, + ) + type_: str = proto.Field( + proto.STRING, + number=4, + ) + application_id: int = proto.Field( + proto.INT64, + number=8, + ) + hostname: str = proto.Field( + proto.STRING, + number=6, + ) + host_id: int = proto.Field( + proto.INT64, + number=7, + ) + + +class ServiceLockInfo(proto.Message): + r"""ServiceLockInfo represents the details of a lock taken by the + service on a Backup resource. + + Attributes: + operation (str): + Output only. The name of the operation that + created this lock. The lock will automatically + be released when the operation completes. + """ + + operation: str = proto.Field( + proto.STRING, + number=1, + ) + + +class BackupApplianceLockInfo(proto.Message): + r"""BackupApplianceLockInfo contains metadata about the + backupappliance that created the lock. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + backup_appliance_id (int): + Required. The ID of the backup/recovery + appliance that created this lock. + backup_appliance_name (str): + Required. The name of the backup/recovery + appliance that created this lock. + lock_reason (str): + Required. The reason for the lock: e.g. + MOUNT/RESTORE/BACKUP/etc. The value of this + string is only meaningful to the client and it + is not interpreted by the BackupVault service. + job_name (str): + The job name on the backup/recovery appliance + that created this lock. + + This field is a member of `oneof`_ ``lock_source``. + backup_image (str): + The image name that depends on this Backup. 
+ + This field is a member of `oneof`_ ``lock_source``. + sla_id (int): + The SLA on the backup/recovery appliance that + owns the lock. + + This field is a member of `oneof`_ ``lock_source``. + """ + + backup_appliance_id: int = proto.Field( + proto.INT64, + number=1, + ) + backup_appliance_name: str = proto.Field( + proto.STRING, + number=2, + ) + lock_reason: str = proto.Field( + proto.STRING, + number=5, + ) + job_name: str = proto.Field( + proto.STRING, + number=6, + oneof="lock_source", + ) + backup_image: str = proto.Field( + proto.STRING, + number=7, + oneof="lock_source", + ) + sla_id: int = proto.Field( + proto.INT64, + number=8, + oneof="lock_source", + ) + + +class BackupLock(proto.Message): + r"""BackupLock represents a single lock on a Backup resource. An + unexpired lock on a Backup prevents the Backup from being + deleted. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + lock_until_time (google.protobuf.timestamp_pb2.Timestamp): + Required. The time after which this lock is + not considered valid and will no longer protect + the Backup from deletion. + backup_appliance_lock_info (google.cloud.backupdr_v1.types.BackupApplianceLockInfo): + If the client is a backup and recovery + appliance, this contains metadata about why the + lock exists. + + This field is a member of `oneof`_ ``ClientLockInfo``. + service_lock_info (google.cloud.backupdr_v1.types.ServiceLockInfo): + Output only. Contains metadata about the lock + exist for Google Cloud native backups. + + This field is a member of `oneof`_ ``ClientLockInfo``. 
+ """ + + lock_until_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + backup_appliance_lock_info: "BackupApplianceLockInfo" = proto.Field( + proto.MESSAGE, + number=3, + oneof="ClientLockInfo", + message="BackupApplianceLockInfo", + ) + service_lock_info: "ServiceLockInfo" = proto.Field( + proto.MESSAGE, + number=4, + oneof="ClientLockInfo", + message="ServiceLockInfo", + ) + + +class Backup(proto.Message): + r"""Message describing a Backup object. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Output only. Identifier. Name of the backup to create. It + must have the + format\ ``"projects//locations//backupVaults//dataSources/{datasource}/backups/{backup}"``. + ``{backup}`` cannot be changed after creation. It must be + between 3-63 characters long and must be unique within the + datasource. + description (str): + Output only. The description of the Backup + instance (2048 characters or less). + + This field is a member of `oneof`_ ``_description``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the instance was + created. + + This field is a member of `oneof`_ ``_create_time``. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the instance was + updated. + + This field is a member of `oneof`_ ``_update_time``. + labels (MutableMapping[str, str]): + Optional. Resource labels to represent user + provided metadata. No labels currently defined. + enforced_retention_end_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. The backup can not be deleted + before this time. 
+ + This field is a member of `oneof`_ ``_enforced_retention_end_time``. + expire_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. When this backup is automatically + expired. + + This field is a member of `oneof`_ ``_expire_time``. + consistency_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The point in time when this + backup was captured from the source. + + This field is a member of `oneof`_ ``_consistency_time``. + etag (str): + Optional. Server specified ETag to prevent + updates from overwriting each other. + + This field is a member of `oneof`_ ``_etag``. + state (google.cloud.backupdr_v1.types.Backup.State): + Output only. The Backup resource instance + state. + service_locks (MutableSequence[google.cloud.backupdr_v1.types.BackupLock]): + Output only. The list of BackupLocks taken by + the service to prevent the deletion of the + backup. + backup_appliance_locks (MutableSequence[google.cloud.backupdr_v1.types.BackupLock]): + Optional. The list of BackupLocks taken by + the accessor Backup Appliance. + compute_instance_backup_properties (google.cloud.backupdr_v1.types.ComputeInstanceBackupProperties): + Output only. Compute Engine specific backup + properties. + + This field is a member of `oneof`_ ``backup_properties``. + backup_appliance_backup_properties (google.cloud.backupdr_v1.types.BackupApplianceBackupProperties): + Output only. Backup Appliance specific backup + properties. + + This field is a member of `oneof`_ ``backup_properties``. + backup_type (google.cloud.backupdr_v1.types.Backup.BackupType): + Output only. Type of the backup, unspecified, + scheduled or ondemand. + gcp_backup_plan_info (google.cloud.backupdr_v1.types.Backup.GCPBackupPlanInfo): + Output only. Configuration for a Google Cloud + resource. + + This field is a member of `oneof`_ ``plan_info``. + resource_size_bytes (int): + Output only. source resource size in bytes at + the time of the backup. 
+ """ + + class State(proto.Enum): + r"""Holds the state of the backup resource. + + Values: + STATE_UNSPECIFIED (0): + State not set. + CREATING (1): + The backup is being created. + ACTIVE (2): + The backup has been created and is fully + usable. + DELETING (3): + The backup is being deleted. + ERROR (4): + The backup is experiencing an issue and might + be unusable. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + ACTIVE = 2 + DELETING = 3 + ERROR = 4 + + class BackupType(proto.Enum): + r"""Type of the backup, scheduled or ondemand. + + Values: + BACKUP_TYPE_UNSPECIFIED (0): + Backup type is unspecified. + SCHEDULED (1): + Scheduled backup. + ON_DEMAND (2): + On demand backup. + """ + BACKUP_TYPE_UNSPECIFIED = 0 + SCHEDULED = 1 + ON_DEMAND = 2 + + class GCPBackupPlanInfo(proto.Message): + r"""GCPBackupPlanInfo captures the plan configuration details of + Google Cloud resources at the time of backup. + + Attributes: + backup_plan (str): + Resource name of backup plan by which + workload is protected at the time of the backup. 
+ Format: + + projects/{project}/locations/{location}/backupPlans/{backupPlanId} + backup_plan_rule_id (str): + The rule id of the backup plan which + triggered this backup in case of scheduled + backup or used for + """ + + backup_plan: str = proto.Field( + proto.STRING, + number=1, + ) + backup_plan_rule_id: str = proto.Field( + proto.STRING, + number=2, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + optional=True, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + optional=True, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + enforced_retention_end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + optional=True, + message=timestamp_pb2.Timestamp, + ) + expire_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + optional=True, + message=timestamp_pb2.Timestamp, + ) + consistency_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + optional=True, + message=timestamp_pb2.Timestamp, + ) + etag: str = proto.Field( + proto.STRING, + number=11, + optional=True, + ) + state: State = proto.Field( + proto.ENUM, + number=15, + enum=State, + ) + service_locks: MutableSequence["BackupLock"] = proto.RepeatedField( + proto.MESSAGE, + number=17, + message="BackupLock", + ) + backup_appliance_locks: MutableSequence["BackupLock"] = proto.RepeatedField( + proto.MESSAGE, + number=18, + message="BackupLock", + ) + compute_instance_backup_properties: backupvault_gce.ComputeInstanceBackupProperties = proto.Field( + proto.MESSAGE, + number=19, + oneof="backup_properties", + message=backupvault_gce.ComputeInstanceBackupProperties, + ) + 
backup_appliance_backup_properties: backupvault_ba.BackupApplianceBackupProperties = proto.Field( + proto.MESSAGE, + number=21, + oneof="backup_properties", + message=backupvault_ba.BackupApplianceBackupProperties, + ) + backup_type: BackupType = proto.Field( + proto.ENUM, + number=20, + enum=BackupType, + ) + gcp_backup_plan_info: GCPBackupPlanInfo = proto.Field( + proto.MESSAGE, + number=22, + oneof="plan_info", + message=GCPBackupPlanInfo, + ) + resource_size_bytes: int = proto.Field( + proto.INT64, + number=23, + ) + + +class CreateBackupVaultRequest(proto.Message): + r"""Message for creating a BackupVault. + + Attributes: + parent (str): + Required. Value for parent. + backup_vault_id (str): + Required. ID of the requesting object If auto-generating ID + server-side, remove this field and backup_vault_id from the + method_signature of Create RPC + backup_vault (google.cloud.backupdr_v1.types.BackupVault): + Required. The resource being created + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is 'false'. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + backup_vault_id: str = proto.Field( + proto.STRING, + number=2, + ) + backup_vault: "BackupVault" = proto.Field( + proto.MESSAGE, + number=3, + message="BackupVault", + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class ListBackupVaultsRequest(proto.Message): + r"""Request message for listing backupvault stores. + + Attributes: + parent (str): + Required. The project and location for which to retrieve + backupvault stores information, in the format + 'projects/{project_id}/locations/{location}'. In Cloud + Backup and DR, locations map to Google Cloud regions, for + example **us-central1**. To retrieve backupvault stores for + all locations, use "-" for the '{location}' value. + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. + page_token (str): + Optional. A token identifying a page of + results the server should return. + filter (str): + Optional. Filtering results. + order_by (str): + Optional. Hint for how to order the results. + view (google.cloud.backupdr_v1.types.BackupVaultView): + Optional. Reserved for future use to provide + a BASIC & FULL view of Backup Vault. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + view: "BackupVaultView" = proto.Field( + proto.ENUM, + number=6, + enum="BackupVaultView", + ) + + +class ListBackupVaultsResponse(proto.Message): + r"""Response message for listing BackupVaults. 
+ + Attributes: + backup_vaults (MutableSequence[google.cloud.backupdr_v1.types.BackupVault]): + The list of BackupVault instances in the + project for the specified location. + + If the '{location}' value in the request is "-", + the response contains a list of instances from + all locations. In case any location is + unreachable, the response will only return + backup vaults in reachable locations and the + 'unreachable' field will be populated with a + list of unreachable locations. + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + backup_vaults: MutableSequence["BackupVault"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="BackupVault", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class FetchUsableBackupVaultsRequest(proto.Message): + r"""Request message for fetching usable BackupVaults. + + Attributes: + parent (str): + Required. The project and location for which to retrieve + backupvault stores information, in the format + 'projects/{project_id}/locations/{location}'. In Cloud + Backup and DR, locations map to Google Cloud regions, for + example **us-central1**. To retrieve backupvault stores for + all locations, use "-" for the '{location}' value. + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. + page_token (str): + Optional. A token identifying a page of + results the server should return. + filter (str): + Optional. Filtering results. + order_by (str): + Optional. Hint for how to order the results. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class FetchUsableBackupVaultsResponse(proto.Message): + r"""Response message for fetching usable BackupVaults. + + Attributes: + backup_vaults (MutableSequence[google.cloud.backupdr_v1.types.BackupVault]): + The list of BackupVault instances in the + project for the specified location. + + If the '{location}' value in the request is "-", + the response contains a list of instances from + all locations. In case any location is + unreachable, the response will only return + backup vaults in reachable locations and the + 'unreachable' field will be populated with a + list of unreachable locations. + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + backup_vaults: MutableSequence["BackupVault"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="BackupVault", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetBackupVaultRequest(proto.Message): + r"""Request message for getting a BackupVault. + + Attributes: + name (str): + Required. Name of the backupvault store resource name, in + the format + 'projects/{project_id}/locations/{location}/backupVaults/{resource_name}' + view (google.cloud.backupdr_v1.types.BackupVaultView): + Optional. 
Reserved for future use to provide + a BASIC & FULL view of Backup Vault + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + view: "BackupVaultView" = proto.Field( + proto.ENUM, + number=2, + enum="BackupVaultView", + ) + + +class UpdateBackupVaultRequest(proto.Message): + r"""Request message for updating a BackupVault. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the BackupVault resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be overwritten + if it is in the mask. If the user does not provide a mask + then the request will fail. + backup_vault (google.cloud.backupdr_v1.types.BackupVault): + Required. The resource being updated + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is 'false'. + force (bool): + Optional. If set to true, will not check plan + duration against backup vault enforcement + duration. 
+ """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + backup_vault: "BackupVault" = proto.Field( + proto.MESSAGE, + number=2, + message="BackupVault", + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + force: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class DeleteBackupVaultRequest(proto.Message): + r"""Message for deleting a BackupVault. + + Attributes: + name (str): + Required. Name of the resource. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + force (bool): + Optional. If set to true, any data source + from this backup vault will also be deleted. + etag (str): + The current etag of the backup vault. + If an etag is provided and does not match the + current etag of the connection, deletion will be + blocked. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is 'false'. + allow_missing (bool): + Optional. If true and the BackupVault is not + found, the request will succeed but no action + will be taken. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + force: bool = proto.Field( + proto.BOOL, + number=3, + ) + etag: str = proto.Field( + proto.STRING, + number=4, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=5, + ) + allow_missing: bool = proto.Field( + proto.BOOL, + number=6, + ) + + +class ListDataSourcesRequest(proto.Message): + r"""Request message for listing DataSources. + + Attributes: + parent (str): + Required. The project and location for which to retrieve + data sources information, in the format + 'projects/{project_id}/locations/{location}'. In Cloud + Backup and DR, locations map to Google Cloud regions, for + example **us-central1**. To retrieve data sources for all + locations, use "-" for the '{location}' value. + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. + page_token (str): + Optional. A token identifying a page of + results the server should return. + filter (str): + Optional. Filtering results. + order_by (str): + Optional. Hint for how to order the results. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListDataSourcesResponse(proto.Message): + r"""Response message for listing DataSources. + + Attributes: + data_sources (MutableSequence[google.cloud.backupdr_v1.types.DataSource]): + The list of DataSource instances in the + project for the specified location. + + If the '{location}' value in the request is "-", + the response contains a list of instances from + all locations. 
In case any location is + unreachable, the response will only return data + sources in reachable locations and the + 'unreachable' field will be populated with a + list of unreachable locations. + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + data_sources: MutableSequence["DataSource"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="DataSource", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetDataSourceRequest(proto.Message): + r"""Request message for getting a DataSource instance. + + Attributes: + name (str): + Required. Name of the data source resource name, in the + format + 'projects/{project_id}/locations/{location}/backupVaults/{resource_name}/dataSource/{resource_name}' + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateDataSourceRequest(proto.Message): + r"""Request message for updating a data source instance. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the DataSource resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be overwritten + if it is in the mask. If the user does not provide a mask + then the request will fail. + data_source (google.cloud.backupdr_v1.types.DataSource): + Required. The resource being updated + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. 
The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + allow_missing (bool): + Optional. Enable upsert. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + data_source: "DataSource" = proto.Field( + proto.MESSAGE, + number=2, + message="DataSource", + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + allow_missing: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class ListBackupsRequest(proto.Message): + r"""Request message for listing Backups. + + Attributes: + parent (str): + Required. The project and location for which to retrieve + backup information, in the format + 'projects/{project_id}/locations/{location}'. In Cloud + Backup and DR, locations map to Google Cloud regions, for + example **us-central1**. To retrieve data sources for all + locations, use "-" for the '{location}' value. + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. + page_token (str): + Optional. A token identifying a page of + results the server should return. + filter (str): + Optional. Filtering results. + order_by (str): + Optional. Hint for how to order the results. + view (google.cloud.backupdr_v1.types.BackupView): + Optional. Reserved for future use to provide + a BASIC & FULL view of Backup resource. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + view: "BackupView" = proto.Field( + proto.ENUM, + number=6, + enum="BackupView", + ) + + +class ListBackupsResponse(proto.Message): + r"""Response message for listing Backups. + + Attributes: + backups (MutableSequence[google.cloud.backupdr_v1.types.Backup]): + The list of Backup instances in the project + for the specified location. + + If the '{location}' value in the request is "-", + the response contains a list of instances from + all locations. In case any location is + unreachable, the response will only return data + sources in reachable locations and the + 'unreachable' field will be populated with a + list of unreachable locations. + next_page_token (str): + A token identifying a page of results the + server should return. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + backups: MutableSequence["Backup"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Backup", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetBackupRequest(proto.Message): + r"""Request message for getting a Backup. + + Attributes: + name (str): + Required. Name of the data source resource name, in the + format + 'projects/{project_id}/locations/{location}/backupVaults/{backupVault}/dataSources/{datasource}/backups/{backup}' + view (google.cloud.backupdr_v1.types.BackupView): + Optional. Reserved for future use to provide + a BASIC & FULL view of Backup resource. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + view: "BackupView" = proto.Field( + proto.ENUM, + number=2, + enum="BackupView", + ) + + +class UpdateBackupRequest(proto.Message): + r"""Request message for updating a Backup. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to specify the fields to be + overwritten in the Backup resource by the update. The fields + specified in the update_mask are relative to the resource, + not the full request. A field will be overwritten if it is + in the mask. If the user does not provide a mask then the + request will fail. + backup (google.cloud.backupdr_v1.types.Backup): + Required. The resource being updated + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + backup: "Backup" = proto.Field( + proto.MESSAGE, + number=2, + message="Backup", + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteBackupRequest(proto.Message): + r"""Message for deleting a Backup. + + Attributes: + name (str): + Required. Name of the resource. 
+ request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class RestoreBackupRequest(proto.Message): + r"""Request message for restoring from a Backup. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Required. The resource name of the Backup instance, in the + format + 'projects/*/locations/*/backupVaults/*/dataSources/*/backups/'. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. 
+ + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + compute_instance_target_environment (google.cloud.backupdr_v1.types.ComputeInstanceTargetEnvironment): + Compute Engine target environment to be used + during restore. + + This field is a member of `oneof`_ ``target_environment``. + compute_instance_restore_properties (google.cloud.backupdr_v1.types.ComputeInstanceRestoreProperties): + Compute Engine instance properties to be + overridden during restore. + + This field is a member of `oneof`_ ``instance_properties``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + compute_instance_target_environment: backupvault_gce.ComputeInstanceTargetEnvironment = proto.Field( + proto.MESSAGE, + number=3, + oneof="target_environment", + message=backupvault_gce.ComputeInstanceTargetEnvironment, + ) + compute_instance_restore_properties: backupvault_gce.ComputeInstanceRestoreProperties = proto.Field( + proto.MESSAGE, + number=4, + oneof="instance_properties", + message=backupvault_gce.ComputeInstanceRestoreProperties, + ) + + +class RestoreBackupResponse(proto.Message): + r"""Response message for restoring from a Backup. + + Attributes: + target_resource (google.cloud.backupdr_v1.types.TargetResource): + Details of the target resource + created/modified as part of restore. + """ + + target_resource: "TargetResource" = proto.Field( + proto.MESSAGE, + number=1, + message="TargetResource", + ) + + +class TargetResource(proto.Message): + r"""Details of the target resource created/modified as part of + restore. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + gcp_resource (google.cloud.backupdr_v1.types.GcpResource): + Details of the native Google Cloud resource + created as part of restore. 
+ + This field is a member of `oneof`_ ``target_resource_info``. + """ + + gcp_resource: "GcpResource" = proto.Field( + proto.MESSAGE, + number=1, + oneof="target_resource_info", + message="GcpResource", + ) + + +class GcpResource(proto.Message): + r"""Minimum details to identify a Google Cloud resource + + Attributes: + gcp_resourcename (str): + Name of the Google Cloud resource. + location (str): + Location of the resource: + //"global"/"unspecified". + type_ (str): + Type of the resource. Use the Unified + Resource Type, eg. + compute.googleapis.com/Instance. + """ + + gcp_resourcename: str = proto.Field( + proto.STRING, + number=1, + ) + location: str = proto.Field( + proto.STRING, + number=2, + ) + type_: str = proto.Field( + proto.STRING, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault_ba.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault_ba.py new file mode 100644 index 000000000000..131f54b56abe --- /dev/null +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault_ba.py @@ -0,0 +1,87 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.backupdr.v1", + manifest={ + "BackupApplianceBackupProperties", + }, +) + + +class BackupApplianceBackupProperties(proto.Message): + r"""BackupApplianceBackupProperties represents BackupDR backup + appliance's properties. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + generation_id (int): + Output only. The numeric generation ID of the + backup (monotonically increasing). + + This field is a member of `oneof`_ ``_generation_id``. + finalize_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when this backup object + was finalized (if none, backup is not + finalized). + + This field is a member of `oneof`_ ``_finalize_time``. + recovery_range_start_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. The earliest timestamp of data + available in this Backup. + + This field is a member of `oneof`_ ``_recovery_range_start_time``. + recovery_range_end_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. The latest timestamp of data + available in this Backup. + + This field is a member of `oneof`_ ``_recovery_range_end_time``. 
+ """ + + generation_id: int = proto.Field( + proto.INT32, + number=1, + optional=True, + ) + finalize_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + optional=True, + message=timestamp_pb2.Timestamp, + ) + recovery_range_start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + optional=True, + message=timestamp_pb2.Timestamp, + ) + recovery_range_end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + optional=True, + message=timestamp_pb2.Timestamp, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault_gce.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault_gce.py new file mode 100644 index 000000000000..9e3e98632644 --- /dev/null +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault_gce.py @@ -0,0 +1,1991 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.backupdr.v1", + manifest={ + "KeyRevocationActionType", + "ComputeInstanceBackupProperties", + "ComputeInstanceRestoreProperties", + "ComputeInstanceTargetEnvironment", + "ComputeInstanceDataSourceProperties", + "AdvancedMachineFeatures", + "ConfidentialInstanceConfig", + "DisplayDevice", + "AcceleratorConfig", + "CustomerEncryptionKey", + "Entry", + "Metadata", + "NetworkInterface", + "NetworkPerformanceConfig", + "AccessConfig", + "AliasIpRange", + "InstanceParams", + "AllocationAffinity", + "Scheduling", + "SchedulingDuration", + "ServiceAccount", + "Tags", + "AttachedDisk", + "GuestOsFeature", + }, +) + + +class KeyRevocationActionType(proto.Enum): + r"""Specifies whether the virtual machine instance will be shut + down on key revocation. It is currently used in instance, + instance properties and GMI protos + + Values: + KEY_REVOCATION_ACTION_TYPE_UNSPECIFIED (0): + Default value. This value is unused. + NONE (1): + Indicates user chose no operation. + STOP (2): + Indicates user chose to opt for VM shutdown + on key revocation. + """ + KEY_REVOCATION_ACTION_TYPE_UNSPECIFIED = 0 + NONE = 1 + STOP = 2 + + +class ComputeInstanceBackupProperties(proto.Message): + r"""ComputeInstanceBackupProperties represents Compute Engine + instance backup properties. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + description (str): + An optional text description for the + instances that are created from these + properties. + + This field is a member of `oneof`_ ``_description``. + tags (google.cloud.backupdr_v1.types.Tags): + A list of tags to apply to the instances that + are created from these properties. The tags + identify valid sources or targets for network + firewalls. 
The setTags method can modify this + list of tags. Each tag within the list must + comply with RFC1035 + (https://www.ietf.org/rfc/rfc1035.txt). + + This field is a member of `oneof`_ ``_tags``. + machine_type (str): + The machine type to use for instances that + are created from these properties. + + This field is a member of `oneof`_ ``_machine_type``. + can_ip_forward (bool): + Enables instances created based on these properties to send + packets with source IP addresses other than their own and + receive packets with destination IP addresses other than + their own. If these instances will be used as an IP gateway + or it will be set as the next-hop in a Route resource, + specify ``true``. If unsure, leave this set to ``false``. + See the + https://cloud.google.com/vpc/docs/using-routes#canipforward + documentation for more information. + + This field is a member of `oneof`_ ``_can_ip_forward``. + network_interface (MutableSequence[google.cloud.backupdr_v1.types.NetworkInterface]): + An array of network access configurations for + this interface. + disk (MutableSequence[google.cloud.backupdr_v1.types.AttachedDisk]): + An array of disks that are associated with + the instances that are created from these + properties. + metadata (google.cloud.backupdr_v1.types.Metadata): + The metadata key/value pairs to assign to + instances that are created from these + properties. These pairs can consist of custom + metadata or predefined keys. See + https://cloud.google.com/compute/docs/metadata/overview + for more information. + + This field is a member of `oneof`_ ``_metadata``. + service_account (MutableSequence[google.cloud.backupdr_v1.types.ServiceAccount]): + A list of service accounts with specified + scopes. Access tokens for these service accounts + are available to the instances that are created + from these properties. Use metadata queries to + obtain the access tokens for these instances. 
+ scheduling (google.cloud.backupdr_v1.types.Scheduling): + Specifies the scheduling options for the + instances that are created from these + properties. + + This field is a member of `oneof`_ ``_scheduling``. + guest_accelerator (MutableSequence[google.cloud.backupdr_v1.types.AcceleratorConfig]): + A list of guest accelerator cards' type and + count to use for instances created from these + properties. + min_cpu_platform (str): + Minimum cpu/platform to be used by instances. The instance + may be scheduled on the specified or newer cpu/platform. + Applicable values are the friendly names of CPU platforms, + such as ``minCpuPlatform: Intel Haswell`` or + ``minCpuPlatform: Intel Sandy Bridge``. For more + information, read + https://cloud.google.com/compute/docs/instances/specify-min-cpu-platform. + + This field is a member of `oneof`_ ``_min_cpu_platform``. + key_revocation_action_type (google.cloud.backupdr_v1.types.KeyRevocationActionType): + KeyRevocationActionType of the instance. + Supported options are "STOP" and "NONE". The + default value is "NONE" if it is not specified. + + This field is a member of `oneof`_ ``_key_revocation_action_type``. + source_instance (str): + The source instance used to create this + backup. This can be a partial or full URL to the + resource. For example, the following are valid + values: + + -https://www.googleapis.com/compute/v1/projects/project/zones/zone/instances/instance + -projects/project/zones/zone/instances/instance + + This field is a member of `oneof`_ ``_source_instance``. + labels (MutableMapping[str, str]): + Labels to apply to instances that are created + from these properties. 
+ """ + + description: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + tags: "Tags" = proto.Field( + proto.MESSAGE, + number=2, + optional=True, + message="Tags", + ) + machine_type: str = proto.Field( + proto.STRING, + number=3, + optional=True, + ) + can_ip_forward: bool = proto.Field( + proto.BOOL, + number=4, + optional=True, + ) + network_interface: MutableSequence["NetworkInterface"] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message="NetworkInterface", + ) + disk: MutableSequence["AttachedDisk"] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message="AttachedDisk", + ) + metadata: "Metadata" = proto.Field( + proto.MESSAGE, + number=7, + optional=True, + message="Metadata", + ) + service_account: MutableSequence["ServiceAccount"] = proto.RepeatedField( + proto.MESSAGE, + number=8, + message="ServiceAccount", + ) + scheduling: "Scheduling" = proto.Field( + proto.MESSAGE, + number=9, + optional=True, + message="Scheduling", + ) + guest_accelerator: MutableSequence["AcceleratorConfig"] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message="AcceleratorConfig", + ) + min_cpu_platform: str = proto.Field( + proto.STRING, + number=11, + optional=True, + ) + key_revocation_action_type: "KeyRevocationActionType" = proto.Field( + proto.ENUM, + number=12, + optional=True, + enum="KeyRevocationActionType", + ) + source_instance: str = proto.Field( + proto.STRING, + number=13, + optional=True, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=14, + ) + + +class ComputeInstanceRestoreProperties(proto.Message): + r"""ComputeInstanceRestoreProperties represents Compute Engine + instance properties to be overridden during restore. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Required. Name of the compute instance. + + This field is a member of `oneof`_ ``_name``. 
+ advanced_machine_features (google.cloud.backupdr_v1.types.AdvancedMachineFeatures): + Optional. Controls for advanced + machine-related behavior features. + + This field is a member of `oneof`_ ``_advanced_machine_features``. + can_ip_forward (bool): + Optional. Allows this instance to send and + receive packets with non-matching destination or + source IPs. + + This field is a member of `oneof`_ ``_can_ip_forward``. + confidential_instance_config (google.cloud.backupdr_v1.types.ConfidentialInstanceConfig): + Optional. Controls Confidential compute + options on the instance + + This field is a member of `oneof`_ ``_confidential_instance_config``. + deletion_protection (bool): + Optional. Whether the resource should be + protected against deletion. + + This field is a member of `oneof`_ ``_deletion_protection``. + description (str): + Optional. An optional description of this + resource. Provide this property when you create + the resource. + + This field is a member of `oneof`_ ``_description``. + disks (MutableSequence[google.cloud.backupdr_v1.types.AttachedDisk]): + Optional. Array of disks associated with this + instance. Persistent disks must be created + before you can assign them. + display_device (google.cloud.backupdr_v1.types.DisplayDevice): + Optional. Enables display device for the + instance. + + This field is a member of `oneof`_ ``_display_device``. + guest_accelerators (MutableSequence[google.cloud.backupdr_v1.types.AcceleratorConfig]): + Optional. A list of the type and count of + accelerator cards attached to the instance. + hostname (str): + Optional. Specifies the hostname of the instance. The + specified hostname must be RFC1035 compliant. If hostname is + not specified, the default hostname is + [INSTANCE_NAME].c.[PROJECT_ID].internal when using the + global DNS, and + [INSTANCE_NAME].[ZONE].c.[PROJECT_ID].internal when using + zonal DNS. + + This field is a member of `oneof`_ ``_hostname``. 
+ instance_encryption_key (google.cloud.backupdr_v1.types.CustomerEncryptionKey): + Optional. Encrypts suspended data for an + instance with a customer-managed encryption key. + + This field is a member of `oneof`_ ``_instance_encryption_key``. + key_revocation_action_type (google.cloud.backupdr_v1.types.KeyRevocationActionType): + Optional. KeyRevocationActionType of the + instance. + + This field is a member of `oneof`_ ``_key_revocation_action_type``. + labels (MutableMapping[str, str]): + Optional. Labels to apply to this instance. + machine_type (str): + Optional. Full or partial URL of the machine + type resource to use for this instance. + + This field is a member of `oneof`_ ``_machine_type``. + metadata (google.cloud.backupdr_v1.types.Metadata): + Optional. This includes custom metadata and + predefined keys. + + This field is a member of `oneof`_ ``_metadata``. + min_cpu_platform (str): + Optional. Minimum CPU platform to use for + this instance. + + This field is a member of `oneof`_ ``_min_cpu_platform``. + network_interfaces (MutableSequence[google.cloud.backupdr_v1.types.NetworkInterface]): + Optional. An array of network configurations + for this instance. These specify how interfaces + are configured to interact with other network + services, such as connecting to the internet. + Multiple interfaces are supported per instance. + network_performance_config (google.cloud.backupdr_v1.types.NetworkPerformanceConfig): + Optional. Configure network performance such + as egress bandwidth tier. + + This field is a member of `oneof`_ ``_network_performance_config``. + params (google.cloud.backupdr_v1.types.InstanceParams): + Input only. Additional params passed with the + request, but not persisted as part of resource + payload. + + This field is a member of `oneof`_ ``_params``. + private_ipv6_google_access (google.cloud.backupdr_v1.types.ComputeInstanceRestoreProperties.InstancePrivateIpv6GoogleAccess): + Optional. 
The private IPv6 google access type for the VM. If + not specified, use INHERIT_FROM_SUBNETWORK as default. + + This field is a member of `oneof`_ ``_private_ipv6_google_access``. + allocation_affinity (google.cloud.backupdr_v1.types.AllocationAffinity): + Optional. Specifies the reservations that + this instance can consume from. + + This field is a member of `oneof`_ ``_allocation_affinity``. + resource_policies (MutableSequence[str]): + Optional. Resource policies applied to this + instance. + scheduling (google.cloud.backupdr_v1.types.Scheduling): + Optional. Sets the scheduling options for + this instance. + + This field is a member of `oneof`_ ``_scheduling``. + service_accounts (MutableSequence[google.cloud.backupdr_v1.types.ServiceAccount]): + Optional. A list of service accounts, with + their specified scopes, authorized for this + instance. Only one service account per VM + instance is supported. + tags (google.cloud.backupdr_v1.types.Tags): + Optional. Tags to apply to this instance. + Tags are used to identify valid sources or + targets for network firewalls and are specified + by the client during instance creation. + + This field is a member of `oneof`_ ``_tags``. + """ + + class InstancePrivateIpv6GoogleAccess(proto.Enum): + r"""The private IPv6 google access type for the VMs. + + Values: + INSTANCE_PRIVATE_IPV6_GOOGLE_ACCESS_UNSPECIFIED (0): + Default value. This value is unused. + INHERIT_FROM_SUBNETWORK (1): + Each network interface inherits + PrivateIpv6GoogleAccess from its subnetwork. + ENABLE_OUTBOUND_VM_ACCESS_TO_GOOGLE (2): + Outbound private IPv6 access from VMs in this + subnet to Google services. If specified, the + subnetwork who is attached to the instance's + default network interface will be assigned an + internal IPv6 prefix if it doesn't have before. + ENABLE_BIDIRECTIONAL_ACCESS_TO_GOOGLE (3): + Bidirectional private IPv6 access to/from + Google services. 
If specified, the subnetwork + who is attached to the instance's default + network interface will be assigned an internal + IPv6 prefix if it doesn't have before. + """ + INSTANCE_PRIVATE_IPV6_GOOGLE_ACCESS_UNSPECIFIED = 0 + INHERIT_FROM_SUBNETWORK = 1 + ENABLE_OUTBOUND_VM_ACCESS_TO_GOOGLE = 2 + ENABLE_BIDIRECTIONAL_ACCESS_TO_GOOGLE = 3 + + name: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + advanced_machine_features: "AdvancedMachineFeatures" = proto.Field( + proto.MESSAGE, + number=2, + optional=True, + message="AdvancedMachineFeatures", + ) + can_ip_forward: bool = proto.Field( + proto.BOOL, + number=3, + optional=True, + ) + confidential_instance_config: "ConfidentialInstanceConfig" = proto.Field( + proto.MESSAGE, + number=4, + optional=True, + message="ConfidentialInstanceConfig", + ) + deletion_protection: bool = proto.Field( + proto.BOOL, + number=5, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=6, + optional=True, + ) + disks: MutableSequence["AttachedDisk"] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message="AttachedDisk", + ) + display_device: "DisplayDevice" = proto.Field( + proto.MESSAGE, + number=8, + optional=True, + message="DisplayDevice", + ) + guest_accelerators: MutableSequence["AcceleratorConfig"] = proto.RepeatedField( + proto.MESSAGE, + number=9, + message="AcceleratorConfig", + ) + hostname: str = proto.Field( + proto.STRING, + number=10, + optional=True, + ) + instance_encryption_key: "CustomerEncryptionKey" = proto.Field( + proto.MESSAGE, + number=11, + optional=True, + message="CustomerEncryptionKey", + ) + key_revocation_action_type: "KeyRevocationActionType" = proto.Field( + proto.ENUM, + number=12, + optional=True, + enum="KeyRevocationActionType", + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=13, + ) + machine_type: str = proto.Field( + proto.STRING, + number=14, + optional=True, + ) + metadata: "Metadata" = 
proto.Field( + proto.MESSAGE, + number=15, + optional=True, + message="Metadata", + ) + min_cpu_platform: str = proto.Field( + proto.STRING, + number=16, + optional=True, + ) + network_interfaces: MutableSequence["NetworkInterface"] = proto.RepeatedField( + proto.MESSAGE, + number=17, + message="NetworkInterface", + ) + network_performance_config: "NetworkPerformanceConfig" = proto.Field( + proto.MESSAGE, + number=18, + optional=True, + message="NetworkPerformanceConfig", + ) + params: "InstanceParams" = proto.Field( + proto.MESSAGE, + number=19, + optional=True, + message="InstanceParams", + ) + private_ipv6_google_access: InstancePrivateIpv6GoogleAccess = proto.Field( + proto.ENUM, + number=20, + optional=True, + enum=InstancePrivateIpv6GoogleAccess, + ) + allocation_affinity: "AllocationAffinity" = proto.Field( + proto.MESSAGE, + number=21, + optional=True, + message="AllocationAffinity", + ) + resource_policies: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=22, + ) + scheduling: "Scheduling" = proto.Field( + proto.MESSAGE, + number=23, + optional=True, + message="Scheduling", + ) + service_accounts: MutableSequence["ServiceAccount"] = proto.RepeatedField( + proto.MESSAGE, + number=24, + message="ServiceAccount", + ) + tags: "Tags" = proto.Field( + proto.MESSAGE, + number=26, + optional=True, + message="Tags", + ) + + +class ComputeInstanceTargetEnvironment(proto.Message): + r"""ComputeInstanceTargetEnvironment represents Compute Engine + target environment to be used during restore. + + Attributes: + project (str): + Required. Target project for the Compute + Engine instance. + zone (str): + Required. The zone of the Compute Engine + instance. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ComputeInstanceDataSourceProperties(proto.Message): + r"""ComputeInstanceDataSourceProperties represents the properties + of a ComputeEngine resource that are stored in the DataSource. + + Attributes: + name (str): + Name of the compute instance backed up by the + datasource. + description (str): + The description of the Compute Engine + instance. + machine_type (str): + The machine type of the instance. + total_disk_count (int): + The total number of disks attached to the + Instance. + total_disk_size_gb (int): + The sum of all the disk sizes. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + machine_type: str = proto.Field( + proto.STRING, + number=3, + ) + total_disk_count: int = proto.Field( + proto.INT64, + number=4, + ) + total_disk_size_gb: int = proto.Field( + proto.INT64, + number=5, + ) + + +class AdvancedMachineFeatures(proto.Message): + r"""Specifies options for controlling advanced machine features. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + enable_nested_virtualization (bool): + Optional. Whether to enable nested + virtualization or not (default is false). + + This field is a member of `oneof`_ ``_enable_nested_virtualization``. + threads_per_core (int): + Optional. The number of threads per physical + core. To disable simultaneous multithreading + (SMT) set this to 1. If unset, the maximum + number of threads supported per core by the + underlying processor is assumed. + + This field is a member of `oneof`_ ``_threads_per_core``. + visible_core_count (int): + Optional. The number of physical cores to + expose to an instance. Multiply by the number of + threads per core to compute the total number of + virtual CPUs to expose to the instance. 
If + unset, the number of cores is inferred from the + instance's nominal CPU count and the underlying + platform's SMT width. + + This field is a member of `oneof`_ ``_visible_core_count``. + enable_uefi_networking (bool): + Optional. Whether to enable UEFI networking + for instance creation. + + This field is a member of `oneof`_ ``_enable_uefi_networking``. + """ + + enable_nested_virtualization: bool = proto.Field( + proto.BOOL, + number=1, + optional=True, + ) + threads_per_core: int = proto.Field( + proto.INT32, + number=2, + optional=True, + ) + visible_core_count: int = proto.Field( + proto.INT32, + number=3, + optional=True, + ) + enable_uefi_networking: bool = proto.Field( + proto.BOOL, + number=4, + optional=True, + ) + + +class ConfidentialInstanceConfig(proto.Message): + r"""A set of Confidential Instance options. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + enable_confidential_compute (bool): + Optional. Defines whether the instance should + have confidential compute enabled. + + This field is a member of `oneof`_ ``_enable_confidential_compute``. + """ + + enable_confidential_compute: bool = proto.Field( + proto.BOOL, + number=1, + optional=True, + ) + + +class DisplayDevice(proto.Message): + r"""A set of Display Device options + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + enable_display (bool): + Optional. Enables display for the Compute + Engine VM + + This field is a member of `oneof`_ ``_enable_display``. + """ + + enable_display: bool = proto.Field( + proto.BOOL, + number=1, + optional=True, + ) + + +class AcceleratorConfig(proto.Message): + r"""A specification of the type and number of accelerator cards + attached to the instance. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + accelerator_type (str): + Optional. Full or partial URL of the + accelerator type resource to attach to this + instance. + + This field is a member of `oneof`_ ``_accelerator_type``. + accelerator_count (int): + Optional. The number of the guest accelerator + cards exposed to this instance. + + This field is a member of `oneof`_ ``_accelerator_count``. + """ + + accelerator_type: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + accelerator_count: int = proto.Field( + proto.INT32, + number=2, + optional=True, + ) + + +class CustomerEncryptionKey(proto.Message): + r"""A customer-supplied encryption key. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + raw_key (str): + Optional. Specifies a 256-bit + customer-supplied encryption key. + + This field is a member of `oneof`_ ``key``. + rsa_encrypted_key (str): + Optional. RSA-wrapped 2048-bit + customer-supplied encryption key to either + encrypt or decrypt this resource. + + This field is a member of `oneof`_ ``key``. + kms_key_name (str): + Optional. The name of the encryption key that + is stored in Google Cloud KMS. + + This field is a member of `oneof`_ ``key``. + kms_key_service_account (str): + Optional. The service account being used for + the encryption request for the given KMS key. If + absent, the Compute Engine default service + account is used. + + This field is a member of `oneof`_ ``_kms_key_service_account``. 
+ """ + + raw_key: str = proto.Field( + proto.STRING, + number=1, + oneof="key", + ) + rsa_encrypted_key: str = proto.Field( + proto.STRING, + number=2, + oneof="key", + ) + kms_key_name: str = proto.Field( + proto.STRING, + number=3, + oneof="key", + ) + kms_key_service_account: str = proto.Field( + proto.STRING, + number=4, + optional=True, + ) + + +class Entry(proto.Message): + r"""A key/value pair to be used for storing metadata. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + key (str): + Optional. Key for the metadata entry. + + This field is a member of `oneof`_ ``_key``. + value (str): + Optional. Value for the metadata entry. These + are free-form strings, and only have meaning as + interpreted by the image running in the + instance. The only restriction placed on values + is that their size must be less than or equal to + 262144 bytes (256 KiB). + + This field is a member of `oneof`_ ``_value``. + """ + + key: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + value: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + + +class Metadata(proto.Message): + r"""A metadata key/value entry. + + Attributes: + items (MutableSequence[google.cloud.backupdr_v1.types.Entry]): + Optional. Array of key/value pairs. The total + size of all keys and values must be less than + 512 KB. + """ + + items: MutableSequence["Entry"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Entry", + ) + + +class NetworkInterface(proto.Message): + r"""A network interface resource attached to an instance. + s + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + network (str): + Optional. URL of the VPC network resource for + this instance. + + This field is a member of `oneof`_ ``_network``. + subnetwork (str): + Optional. The URL of the Subnetwork resource + for this instance. 
+ + This field is a member of `oneof`_ ``_subnetwork``. + ip_address (str): + Optional. An IPv4 internal IP address to + assign to the instance for this network + interface. If not specified by the user, an + unused internal IP is assigned by the system. + + This field is a member of `oneof`_ ``_ip_address``. + ipv6_address (str): + Optional. An IPv6 internal network address + for this network interface. To use a static + internal IP address, it must be unused and in + the same region as the instance's zone. If not + specified, Google Cloud will automatically + assign an internal IPv6 address from the + instance's subnetwork. + + This field is a member of `oneof`_ ``_ipv6_address``. + internal_ipv6_prefix_length (int): + Optional. The prefix length of the primary + internal IPv6 range. + + This field is a member of `oneof`_ ``_internal_ipv6_prefix_length``. + name (str): + Output only. [Output Only] The name of the network + interface, which is generated by the server. + + This field is a member of `oneof`_ ``_name``. + access_configs (MutableSequence[google.cloud.backupdr_v1.types.AccessConfig]): + Optional. An array of configurations for this interface. + Currently, only one access config,ONE_TO_ONE_NAT is + supported. If there are no accessConfigs specified, then + this instance will have no external internet access. + ipv6_access_configs (MutableSequence[google.cloud.backupdr_v1.types.AccessConfig]): + Optional. An array of IPv6 access configurations for this + interface. Currently, only one IPv6 access config, + DIRECT_IPV6, is supported. If there is no ipv6AccessConfig + specified, then this instance will have no external IPv6 + Internet access. + alias_ip_ranges (MutableSequence[google.cloud.backupdr_v1.types.AliasIpRange]): + Optional. An array of alias IP ranges for + this network interface. You can only specify + this field for network interfaces in VPC + networks. 
+ stack_type (google.cloud.backupdr_v1.types.NetworkInterface.StackType): + The stack type for this network interface. + + This field is a member of `oneof`_ ``_stack_type``. + ipv6_access_type (google.cloud.backupdr_v1.types.NetworkInterface.Ipv6AccessType): + Optional. [Output Only] One of EXTERNAL, INTERNAL to + indicate whether the IP can be accessed from the Internet. + This field is always inherited from its subnetwork. + + This field is a member of `oneof`_ ``_ipv6_access_type``. + queue_count (int): + Optional. The networking queue count that's + specified by users for the network interface. + Both Rx and Tx queues will be set to this + number. It'll be empty if not specified by the + users. + + This field is a member of `oneof`_ ``_queue_count``. + nic_type (google.cloud.backupdr_v1.types.NetworkInterface.NicType): + Optional. The type of vNIC to be used on this + interface. This may be gVNIC or VirtioNet. + + This field is a member of `oneof`_ ``_nic_type``. + network_attachment (str): + Optional. The URL of the network attachment that this + interface should connect to in the following format: + projects/{project_number}/regions/{region_name}/networkAttachments/{network_attachment_name}. + + This field is a member of `oneof`_ ``_network_attachment``. + """ + + class StackType(proto.Enum): + r"""Stack type for this network interface. + + Values: + STACK_TYPE_UNSPECIFIED (0): + Default should be STACK_TYPE_UNSPECIFIED. + IPV4_ONLY (1): + The network interface will be assigned IPv4 + address. + IPV4_IPV6 (2): + The network interface can have both IPv4 and + IPv6 addresses. + """ + STACK_TYPE_UNSPECIFIED = 0 + IPV4_ONLY = 1 + IPV4_IPV6 = 2 + + class Ipv6AccessType(proto.Enum): + r"""IPv6 access type for this network interface. + + Values: + UNSPECIFIED_IPV6_ACCESS_TYPE (0): + IPv6 access type not set. Means this network + interface hasn't been turned on IPv6 yet. + INTERNAL (1): + This network interface can have internal + IPv6. 
+ EXTERNAL (2): + This network interface can have external + IPv6. + """ + UNSPECIFIED_IPV6_ACCESS_TYPE = 0 + INTERNAL = 1 + EXTERNAL = 2 + + class NicType(proto.Enum): + r"""Nic type for this network interface. + + Values: + NIC_TYPE_UNSPECIFIED (0): + Default should be NIC_TYPE_UNSPECIFIED. + VIRTIO_NET (1): + VIRTIO + GVNIC (2): + GVNIC + """ + NIC_TYPE_UNSPECIFIED = 0 + VIRTIO_NET = 1 + GVNIC = 2 + + network: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + subnetwork: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + ip_address: str = proto.Field( + proto.STRING, + number=3, + optional=True, + ) + ipv6_address: str = proto.Field( + proto.STRING, + number=4, + optional=True, + ) + internal_ipv6_prefix_length: int = proto.Field( + proto.INT32, + number=5, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=6, + optional=True, + ) + access_configs: MutableSequence["AccessConfig"] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message="AccessConfig", + ) + ipv6_access_configs: MutableSequence["AccessConfig"] = proto.RepeatedField( + proto.MESSAGE, + number=8, + message="AccessConfig", + ) + alias_ip_ranges: MutableSequence["AliasIpRange"] = proto.RepeatedField( + proto.MESSAGE, + number=9, + message="AliasIpRange", + ) + stack_type: StackType = proto.Field( + proto.ENUM, + number=10, + optional=True, + enum=StackType, + ) + ipv6_access_type: Ipv6AccessType = proto.Field( + proto.ENUM, + number=11, + optional=True, + enum=Ipv6AccessType, + ) + queue_count: int = proto.Field( + proto.INT32, + number=12, + optional=True, + ) + nic_type: NicType = proto.Field( + proto.ENUM, + number=13, + optional=True, + enum=NicType, + ) + network_attachment: str = proto.Field( + proto.STRING, + number=14, + optional=True, + ) + + +class NetworkPerformanceConfig(proto.Message): + r"""Network performance configuration. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + total_egress_bandwidth_tier (google.cloud.backupdr_v1.types.NetworkPerformanceConfig.Tier): + Optional. The tier of the total egress + bandwidth. + + This field is a member of `oneof`_ ``_total_egress_bandwidth_tier``. + """ + + class Tier(proto.Enum): + r"""Network performance tier. + + Values: + TIER_UNSPECIFIED (0): + This value is unused. + DEFAULT (1): + Default network performance config. + TIER_1 (2): + Tier 1 network performance config. + """ + TIER_UNSPECIFIED = 0 + DEFAULT = 1 + TIER_1 = 2 + + total_egress_bandwidth_tier: Tier = proto.Field( + proto.ENUM, + number=1, + optional=True, + enum=Tier, + ) + + +class AccessConfig(proto.Message): + r"""An access configuration attached to an instance's network + interface. Only one access config per instance is supported. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + type_ (google.cloud.backupdr_v1.types.AccessConfig.AccessType): + Optional. In accessConfigs (IPv4), the default and only + option is ONE_TO_ONE_NAT. In ipv6AccessConfigs, the default + and only option is DIRECT_IPV6. + + This field is a member of `oneof`_ ``_type``. + name (str): + Optional. The name of this access + configuration. + + This field is a member of `oneof`_ ``_name``. + external_ip (str): + Optional. The external IP address of this + access configuration. + + This field is a member of `oneof`_ ``_external_ip``. + external_ipv6 (str): + Optional. The external IPv6 address of this + access configuration. + + This field is a member of `oneof`_ ``_external_ipv6``. + external_ipv6_prefix_length (int): + Optional. The prefix length of the external + IPv6 range. + + This field is a member of `oneof`_ ``_external_ipv6_prefix_length``. + set_public_ptr (bool): + Optional. 
Specifies whether a public DNS + 'PTR' record should be created to map the + external IP address of the instance to a DNS + domain name. + + This field is a member of `oneof`_ ``_set_public_ptr``. + public_ptr_domain_name (str): + Optional. The DNS domain name for the public + PTR record. + + This field is a member of `oneof`_ ``_public_ptr_domain_name``. + network_tier (google.cloud.backupdr_v1.types.AccessConfig.NetworkTier): + Optional. This signifies the networking tier + used for configuring this access + + This field is a member of `oneof`_ ``_network_tier``. + """ + + class AccessType(proto.Enum): + r"""The type of configuration. + + Values: + ACCESS_TYPE_UNSPECIFIED (0): + Default value. This value is unused. + ONE_TO_ONE_NAT (1): + ONE_TO_ONE_NAT + DIRECT_IPV6 (2): + Direct IPv6 access. + """ + ACCESS_TYPE_UNSPECIFIED = 0 + ONE_TO_ONE_NAT = 1 + DIRECT_IPV6 = 2 + + class NetworkTier(proto.Enum): + r"""Network tier property used by addresses, instances and + forwarding rules. + + Values: + NETWORK_TIER_UNSPECIFIED (0): + Default value. This value is unused. + PREMIUM (1): + High quality, Google-grade network tier, + support for all networking products. + STANDARD (2): + Public internet quality, only limited support + for other networking products. 
+ """ + NETWORK_TIER_UNSPECIFIED = 0 + PREMIUM = 1 + STANDARD = 2 + + type_: AccessType = proto.Field( + proto.ENUM, + number=1, + optional=True, + enum=AccessType, + ) + name: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + external_ip: str = proto.Field( + proto.STRING, + number=3, + optional=True, + ) + external_ipv6: str = proto.Field( + proto.STRING, + number=4, + optional=True, + ) + external_ipv6_prefix_length: int = proto.Field( + proto.INT32, + number=5, + optional=True, + ) + set_public_ptr: bool = proto.Field( + proto.BOOL, + number=6, + optional=True, + ) + public_ptr_domain_name: str = proto.Field( + proto.STRING, + number=7, + optional=True, + ) + network_tier: NetworkTier = proto.Field( + proto.ENUM, + number=8, + optional=True, + enum=NetworkTier, + ) + + +class AliasIpRange(proto.Message): + r"""An alias IP range attached to an instance's network + interface. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + ip_cidr_range (str): + Optional. The IP alias ranges to allocate for + this interface. + + This field is a member of `oneof`_ ``_ip_cidr_range``. + subnetwork_range_name (str): + Optional. The name of a subnetwork secondary + IP range from which to allocate an IP alias + range. If not specified, the primary range of + the subnetwork is used. + + This field is a member of `oneof`_ ``_subnetwork_range_name``. + """ + + ip_cidr_range: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + subnetwork_range_name: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + + +class InstanceParams(proto.Message): + r"""Additional instance params. + + Attributes: + resource_manager_tags (MutableMapping[str, str]): + Optional. Resource manager tags to be bound + to the instance. 
+ """ + + resource_manager_tags: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=1, + ) + + +class AllocationAffinity(proto.Message): + r"""Specifies the reservations that this instance can consume + from. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + consume_allocation_type (google.cloud.backupdr_v1.types.AllocationAffinity.Type): + Optional. Specifies the type of reservation + from which this instance can consume + + This field is a member of `oneof`_ ``_consume_allocation_type``. + key (str): + Optional. Corresponds to the label key of a + reservation resource. + + This field is a member of `oneof`_ ``_key``. + values (MutableSequence[str]): + Optional. Corresponds to the label values of + a reservation resource. + """ + + class Type(proto.Enum): + r"""Indicates whether to consume from a reservation or not. + + Values: + TYPE_UNSPECIFIED (0): + Default value. This value is unused. + NO_RESERVATION (1): + Do not consume from any allocated capacity. + ANY_RESERVATION (2): + Consume any allocation available. + SPECIFIC_RESERVATION (3): + Must consume from a specific reservation. + Must specify key value fields for specifying the + reservations. + """ + TYPE_UNSPECIFIED = 0 + NO_RESERVATION = 1 + ANY_RESERVATION = 2 + SPECIFIC_RESERVATION = 3 + + consume_allocation_type: Type = proto.Field( + proto.ENUM, + number=1, + optional=True, + enum=Type, + ) + key: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class Scheduling(proto.Message): + r"""Sets the scheduling options for an Instance. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + on_host_maintenance (google.cloud.backupdr_v1.types.Scheduling.OnHostMaintenance): + Optional. 
Defines the maintenance behavior + for this instance. + + This field is a member of `oneof`_ ``_on_host_maintenance``. + automatic_restart (bool): + Optional. Specifies whether the instance + should be automatically restarted if it is + terminated by Compute Engine (not terminated by + a user). + + This field is a member of `oneof`_ ``_automatic_restart``. + preemptible (bool): + Optional. Defines whether the instance is + preemptible. + + This field is a member of `oneof`_ ``_preemptible``. + node_affinities (MutableSequence[google.cloud.backupdr_v1.types.Scheduling.NodeAffinity]): + Optional. A set of node affinity and + anti-affinity configurations. Overrides + reservationAffinity. + min_node_cpus (int): + Optional. The minimum number of virtual CPUs + this instance will consume when running on a + sole-tenant node. + + This field is a member of `oneof`_ ``_min_node_cpus``. + provisioning_model (google.cloud.backupdr_v1.types.Scheduling.ProvisioningModel): + Optional. Specifies the provisioning model of + the instance. + + This field is a member of `oneof`_ ``_provisioning_model``. + instance_termination_action (google.cloud.backupdr_v1.types.Scheduling.InstanceTerminationAction): + Optional. Specifies the termination action + for the instance. + + This field is a member of `oneof`_ ``_instance_termination_action``. + local_ssd_recovery_timeout (google.cloud.backupdr_v1.types.SchedulingDuration): + Optional. Specifies the maximum amount of + time a Local Ssd Vm should wait while recovery + of the Local Ssd state is attempted. Its value + should be in between 0 and 168 hours with hour + granularity and the default value being 1 hour. + + This field is a member of `oneof`_ ``_local_ssd_recovery_timeout``. + """ + + class OnHostMaintenance(proto.Enum): + r"""Defines the maintenance behavior for this instance. + + Values: + ON_HOST_MAINTENANCE_UNSPECIFIED (0): + Default value. This value is unused. 
+ TERMINATE (1): + Tells Compute Engine to terminate and + (optionally) restart the instance away from the + maintenance activity. + MIGRATE (1000): + Default, Allows Compute Engine to + automatically migrate instances out of the way + of maintenance events. + """ + ON_HOST_MAINTENANCE_UNSPECIFIED = 0 + TERMINATE = 1 + MIGRATE = 1000 + + class ProvisioningModel(proto.Enum): + r"""Defines the provisioning model for an instance. + + Values: + PROVISIONING_MODEL_UNSPECIFIED (0): + Default value. This value is not used. + STANDARD (1): + Standard provisioning with user controlled + runtime, no discounts. + SPOT (2): + Heavily discounted, no guaranteed runtime. + """ + PROVISIONING_MODEL_UNSPECIFIED = 0 + STANDARD = 1 + SPOT = 2 + + class InstanceTerminationAction(proto.Enum): + r"""Defines the supported termination actions for an instance. + + Values: + INSTANCE_TERMINATION_ACTION_UNSPECIFIED (0): + Default value. This value is unused. + DELETE (1): + Delete the VM. + STOP (2): + Stop the VM without storing in-memory + content. default action. + """ + INSTANCE_TERMINATION_ACTION_UNSPECIFIED = 0 + DELETE = 1 + STOP = 2 + + class NodeAffinity(proto.Message): + r"""Node Affinity: the configuration of desired nodes onto which + this Instance could be scheduled. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + key (str): + Optional. Corresponds to the label key of + Node resource. + + This field is a member of `oneof`_ ``_key``. + operator (google.cloud.backupdr_v1.types.Scheduling.NodeAffinity.Operator): + Optional. Defines the operation of node + selection. + + This field is a member of `oneof`_ ``_operator``. + values (MutableSequence[str]): + Optional. Corresponds to the label values of + Node resource. + """ + + class Operator(proto.Enum): + r"""Defines the type of node selections. + + Values: + OPERATOR_UNSPECIFIED (0): + Default value. This value is unused. 
+ IN (1): + Requires Compute Engine to seek for matched + nodes. + NOT_IN (2): + Requires Compute Engine to avoid certain + nodes. + """ + OPERATOR_UNSPECIFIED = 0 + IN = 1 + NOT_IN = 2 + + key: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + operator: "Scheduling.NodeAffinity.Operator" = proto.Field( + proto.ENUM, + number=2, + optional=True, + enum="Scheduling.NodeAffinity.Operator", + ) + values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + on_host_maintenance: OnHostMaintenance = proto.Field( + proto.ENUM, + number=1, + optional=True, + enum=OnHostMaintenance, + ) + automatic_restart: bool = proto.Field( + proto.BOOL, + number=2, + optional=True, + ) + preemptible: bool = proto.Field( + proto.BOOL, + number=3, + optional=True, + ) + node_affinities: MutableSequence[NodeAffinity] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=NodeAffinity, + ) + min_node_cpus: int = proto.Field( + proto.INT32, + number=5, + optional=True, + ) + provisioning_model: ProvisioningModel = proto.Field( + proto.ENUM, + number=6, + optional=True, + enum=ProvisioningModel, + ) + instance_termination_action: InstanceTerminationAction = proto.Field( + proto.ENUM, + number=7, + optional=True, + enum=InstanceTerminationAction, + ) + local_ssd_recovery_timeout: "SchedulingDuration" = proto.Field( + proto.MESSAGE, + number=10, + optional=True, + message="SchedulingDuration", + ) + + +class SchedulingDuration(proto.Message): + r"""A SchedulingDuration represents a fixed-length span of time + represented as a count of seconds and fractions of seconds at + nanosecond resolution. It is independent of any calendar and + concepts like "day" or "month". Range is approximately 10,000 + years. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + seconds (int): + Optional. Span of time at a resolution of a + second. 
+ + This field is a member of `oneof`_ ``_seconds``. + nanos (int): + Optional. Span of time that's a fraction of a + second at nanosecond resolution. + + This field is a member of `oneof`_ ``_nanos``. + """ + + seconds: int = proto.Field( + proto.INT64, + number=1, + optional=True, + ) + nanos: int = proto.Field( + proto.INT32, + number=2, + optional=True, + ) + + +class ServiceAccount(proto.Message): + r"""A service account. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + email (str): + Optional. Email address of the service + account. + + This field is a member of `oneof`_ ``_email``. + scopes (MutableSequence[str]): + Optional. The list of scopes to be made + available for this service account. + """ + + email: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + scopes: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + +class Tags(proto.Message): + r"""A set of instance tags. + + Attributes: + items (MutableSequence[str]): + Optional. An array of tags. Each tag must be + 1-63 characters long, and comply with RFC1035. + """ + + items: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + +class AttachedDisk(proto.Message): + r"""An instance-attached disk resource. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + initialize_params (google.cloud.backupdr_v1.types.AttachedDisk.InitializeParams): + Optional. Specifies the parameters to + initialize this disk. + + This field is a member of `oneof`_ ``_initialize_params``. + device_name (str): + Optional. This is used as an identifier for the disks. This + is the unique name has to provided to modify disk parameters + like disk_name and replica_zones (in case of RePDs) + + This field is a member of `oneof`_ ``_device_name``. + kind (str): + Optional. Type of the resource. 
+ + This field is a member of `oneof`_ ``_kind``. + disk_type_deprecated (google.cloud.backupdr_v1.types.AttachedDisk.DiskType): + Specifies the type of the disk. + + This field is a member of `oneof`_ ``_disk_type_deprecated``. + mode (google.cloud.backupdr_v1.types.AttachedDisk.DiskMode): + Optional. The mode in which to attach this + disk. + + This field is a member of `oneof`_ ``_mode``. + source (str): + Optional. Specifies a valid partial or full + URL to an existing Persistent Disk resource. + + This field is a member of `oneof`_ ``_source``. + index (int): + Optional. A zero-based index to this disk, + where 0 is reserved for the boot disk. + + This field is a member of `oneof`_ ``_index``. + boot (bool): + Optional. Indicates that this is a boot disk. + The virtual machine will use the first partition + of the disk for its root filesystem. + + This field is a member of `oneof`_ ``_boot``. + auto_delete (bool): + Optional. Specifies whether the disk will be + auto-deleted when the instance is deleted (but + not when the disk is detached from the + instance). + + This field is a member of `oneof`_ ``_auto_delete``. + license_ (MutableSequence[str]): + Optional. Any valid publicly visible + licenses. + disk_interface (google.cloud.backupdr_v1.types.AttachedDisk.DiskInterface): + Optional. Specifies the disk interface to use + for attaching this disk. + + This field is a member of `oneof`_ ``_disk_interface``. + guest_os_feature (MutableSequence[google.cloud.backupdr_v1.types.GuestOsFeature]): + Optional. A list of features to enable on the + guest operating system. Applicable only for + bootable images. + disk_encryption_key (google.cloud.backupdr_v1.types.CustomerEncryptionKey): + Optional. Encrypts or decrypts a disk using a + customer-supplied encryption key. + + This field is a member of `oneof`_ ``_disk_encryption_key``. + disk_size_gb (int): + Optional. The size of the disk in GB. + + This field is a member of `oneof`_ ``_disk_size_gb``. 
+ saved_state (google.cloud.backupdr_v1.types.AttachedDisk.DiskSavedState): + Optional. Output only. The state of the disk. + + This field is a member of `oneof`_ ``_saved_state``. + disk_type (str): + Optional. Output only. The URI of the disk + type resource. For example: + projects/project/zones/zone/diskTypes/pd-standard + or pd-ssd + + This field is a member of `oneof`_ ``_disk_type``. + type_ (google.cloud.backupdr_v1.types.AttachedDisk.DiskType): + Optional. Specifies the type of the disk. + + This field is a member of `oneof`_ ``_type``. + """ + + class DiskType(proto.Enum): + r"""List of the Disk Types. + + Values: + DISK_TYPE_UNSPECIFIED (0): + Default value, which is unused. + SCRATCH (1): + A scratch disk type. + PERSISTENT (2): + A persistent disk type. + """ + DISK_TYPE_UNSPECIFIED = 0 + SCRATCH = 1 + PERSISTENT = 2 + + class DiskMode(proto.Enum): + r"""List of the Disk Modes. + + Values: + DISK_MODE_UNSPECIFIED (0): + Default value, which is unused. + READ_WRITE (1): + Attaches this disk in read-write mode. Only + one virtual machine at a time can be attached to + a disk in read-write mode. + READ_ONLY (2): + Attaches this disk in read-only mode. + Multiple virtual machines can use a disk in + read-only mode at a time. + LOCKED (3): + The disk is locked for administrative + reasons. Nobody else can use the disk. This mode + is used (for example) when taking a snapshot of + a disk to prevent mounting the disk while it is + being snapshotted. + """ + DISK_MODE_UNSPECIFIED = 0 + READ_WRITE = 1 + READ_ONLY = 2 + LOCKED = 3 + + class DiskInterface(proto.Enum): + r"""List of the Disk Interfaces. + + Values: + DISK_INTERFACE_UNSPECIFIED (0): + Default value, which is unused. + SCSI (1): + SCSI Disk Interface. + NVME (2): + NVME Disk Interface. + NVDIMM (3): + NVDIMM Disk Interface. + ISCSI (4): + ISCSI Disk Interface. 
+ """ + DISK_INTERFACE_UNSPECIFIED = 0 + SCSI = 1 + NVME = 2 + NVDIMM = 3 + ISCSI = 4 + + class DiskSavedState(proto.Enum): + r"""List of the states of the Disk. + + Values: + DISK_SAVED_STATE_UNSPECIFIED (0): + Default Disk state has not been preserved. + PRESERVED (1): + Disk state has been preserved. + """ + DISK_SAVED_STATE_UNSPECIFIED = 0 + PRESERVED = 1 + + class InitializeParams(proto.Message): + r"""Specifies the parameters to initialize this disk. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + disk_name (str): + Optional. Specifies the disk name. If not + specified, the default is to use the name of the + instance. + + This field is a member of `oneof`_ ``_disk_name``. + replica_zones (MutableSequence[str]): + Optional. URL of the zone where the disk + should be created. Required for each regional + disk associated with the instance. + """ + + disk_name: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + replica_zones: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + initialize_params: InitializeParams = proto.Field( + proto.MESSAGE, + number=1, + optional=True, + message=InitializeParams, + ) + device_name: str = proto.Field( + proto.STRING, + number=4, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=5, + optional=True, + ) + disk_type_deprecated: DiskType = proto.Field( + proto.ENUM, + number=6, + optional=True, + enum=DiskType, + ) + mode: DiskMode = proto.Field( + proto.ENUM, + number=7, + optional=True, + enum=DiskMode, + ) + source: str = proto.Field( + proto.STRING, + number=8, + optional=True, + ) + index: int = proto.Field( + proto.INT64, + number=9, + optional=True, + ) + boot: bool = proto.Field( + proto.BOOL, + number=10, + optional=True, + ) + auto_delete: bool = proto.Field( + proto.BOOL, + number=11, + optional=True, + ) + license_: MutableSequence[str] = proto.RepeatedField( + 
proto.STRING, + number=12, + ) + disk_interface: DiskInterface = proto.Field( + proto.ENUM, + number=13, + optional=True, + enum=DiskInterface, + ) + guest_os_feature: MutableSequence["GuestOsFeature"] = proto.RepeatedField( + proto.MESSAGE, + number=14, + message="GuestOsFeature", + ) + disk_encryption_key: "CustomerEncryptionKey" = proto.Field( + proto.MESSAGE, + number=15, + optional=True, + message="CustomerEncryptionKey", + ) + disk_size_gb: int = proto.Field( + proto.INT64, + number=16, + optional=True, + ) + saved_state: DiskSavedState = proto.Field( + proto.ENUM, + number=17, + optional=True, + enum=DiskSavedState, + ) + disk_type: str = proto.Field( + proto.STRING, + number=18, + optional=True, + ) + type_: DiskType = proto.Field( + proto.ENUM, + number=19, + optional=True, + enum=DiskType, + ) + + +class GuestOsFeature(proto.Message): + r"""Feature type of the Guest OS. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + type_ (google.cloud.backupdr_v1.types.GuestOsFeature.FeatureType): + The ID of a supported feature. + + This field is a member of `oneof`_ ``_type``. + """ + + class FeatureType(proto.Enum): + r"""List of the Feature Types. + + Values: + FEATURE_TYPE_UNSPECIFIED (0): + Default value, which is unused. + VIRTIO_SCSI_MULTIQUEUE (1): + VIRTIO_SCSI_MULTIQUEUE feature type. + WINDOWS (2): + WINDOWS feature type. + MULTI_IP_SUBNET (3): + MULTI_IP_SUBNET feature type. + UEFI_COMPATIBLE (4): + UEFI_COMPATIBLE feature type. + SECURE_BOOT (5): + SECURE_BOOT feature type. + GVNIC (6): + GVNIC feature type. + SEV_CAPABLE (7): + SEV_CAPABLE feature type. + BARE_METAL_LINUX_COMPATIBLE (8): + BARE_METAL_LINUX_COMPATIBLE feature type. + SUSPEND_RESUME_COMPATIBLE (9): + SUSPEND_RESUME_COMPATIBLE feature type. + SEV_LIVE_MIGRATABLE (10): + SEV_LIVE_MIGRATABLE feature type. + SEV_SNP_CAPABLE (11): + SEV_SNP_CAPABLE feature type. + TDX_CAPABLE (12): + TDX_CAPABLE feature type. 
+ IDPF (13): + IDPF feature type. + SEV_LIVE_MIGRATABLE_V2 (14): + SEV_LIVE_MIGRATABLE_V2 feature type. + """ + FEATURE_TYPE_UNSPECIFIED = 0 + VIRTIO_SCSI_MULTIQUEUE = 1 + WINDOWS = 2 + MULTI_IP_SUBNET = 3 + UEFI_COMPATIBLE = 4 + SECURE_BOOT = 5 + GVNIC = 6 + SEV_CAPABLE = 7 + BARE_METAL_LINUX_COMPATIBLE = 8 + SUSPEND_RESUME_COMPATIBLE = 9 + SEV_LIVE_MIGRATABLE = 10 + SEV_SNP_CAPABLE = 11 + TDX_CAPABLE = 12 + IDPF = 13 + SEV_LIVE_MIGRATABLE_V2 = 14 + + type_: FeatureType = proto.Field( + proto.ENUM, + number=1, + optional=True, + enum=FeatureType, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_async.py new file mode 100644 index 000000000000..25dbf9cca081 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackupPlanAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_CreateBackupPlanAssociation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_create_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + backup_plan_association = backupdr_v1.BackupPlanAssociation() + backup_plan_association.resource = "resource_value" + backup_plan_association.backup_plan = "backup_plan_value" + + request = backupdr_v1.CreateBackupPlanAssociationRequest( + parent="parent_value", + backup_plan_association_id="backup_plan_association_id_value", + backup_plan_association=backup_plan_association, + ) + + # Make the request + operation = client.create_backup_plan_association(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_CreateBackupPlanAssociation_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_sync.py new file mode 100644 index 000000000000..fc82ca77f706 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# 
Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackupPlanAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_CreateBackupPlanAssociation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_create_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + backup_plan_association = backupdr_v1.BackupPlanAssociation() + backup_plan_association.resource = "resource_value" + backup_plan_association.backup_plan = "backup_plan_value" + + request = backupdr_v1.CreateBackupPlanAssociationRequest( + parent="parent_value", + backup_plan_association_id="backup_plan_association_id_value", + backup_plan_association=backup_plan_association, + ) + + # Make the request + operation = client.create_backup_plan_association(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_CreateBackupPlanAssociation_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_async.py new file mode 100644 index 000000000000..ff546daa2ac6 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_async.py @@ -0,0 +1,68 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackupPlan +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_CreateBackupPlan_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_create_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + backup_plan = backupdr_v1.BackupPlan() + backup_plan.backup_rules.standard_schedule.recurrence_type = "YEARLY" + backup_plan.backup_rules.standard_schedule.backup_window.start_hour_of_day = 1820 + backup_plan.backup_rules.standard_schedule.backup_window.end_hour_of_day = 1573 + backup_plan.backup_rules.standard_schedule.time_zone = "time_zone_value" + backup_plan.backup_rules.rule_id = "rule_id_value" + backup_plan.backup_rules.backup_retention_days = 2237 + backup_plan.resource_type = "resource_type_value" + backup_plan.backup_vault = "backup_vault_value" + + request = backupdr_v1.CreateBackupPlanRequest( + parent="parent_value", + backup_plan_id="backup_plan_id_value", + backup_plan=backup_plan, + ) + + # Make the request + operation = client.create_backup_plan(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the 
response + print(response) + +# [END backupdr_v1_generated_BackupDR_CreateBackupPlan_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_sync.py new file mode 100644 index 000000000000..5c648a085be1 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_sync.py @@ -0,0 +1,68 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackupPlan +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_CreateBackupPlan_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_create_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + backup_plan = backupdr_v1.BackupPlan() + backup_plan.backup_rules.standard_schedule.recurrence_type = "YEARLY" + backup_plan.backup_rules.standard_schedule.backup_window.start_hour_of_day = 1820 + backup_plan.backup_rules.standard_schedule.backup_window.end_hour_of_day = 1573 + backup_plan.backup_rules.standard_schedule.time_zone = "time_zone_value" + backup_plan.backup_rules.rule_id = "rule_id_value" + backup_plan.backup_rules.backup_retention_days = 2237 + backup_plan.resource_type = "resource_type_value" + backup_plan.backup_vault = "backup_vault_value" + + request = backupdr_v1.CreateBackupPlanRequest( + parent="parent_value", + backup_plan_id="backup_plan_id_value", + backup_plan=backup_plan, + ) + + # Make the request + operation = client.create_backup_plan(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_CreateBackupPlan_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_vault_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_vault_async.py new file mode 100644 index 000000000000..1acf666c5d38 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_vault_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackupVault +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_CreateBackupVault_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_create_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.CreateBackupVaultRequest( + parent="parent_value", + backup_vault_id="backup_vault_id_value", + ) + + # Make the request + operation = client.create_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_CreateBackupVault_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_external_api_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_vault_sync.py similarity index 71% rename from packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_external_api_async.py rename to packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_vault_sync.py index a9cff017ea20..5a1abda3a275 100644 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_external_api_async.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_vault_sync.py @@ -15,15 +15,15 @@ # # Generated code. DO NOT EDIT! # -# Snippet for CreateExternalApi +# Snippet for CreateBackupVault # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. 
# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub +# python3 -m pip install google-cloud-backupdr -# [START apihub_v1_generated_ApiHub_CreateExternalApi_async] +# [START backupdr_v1_generated_BackupDR_CreateBackupVault_sync] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -31,26 +31,27 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 +from google.cloud import backupdr_v1 -async def sample_create_external_api(): +def sample_create_backup_vault(): # Create a client - client = apihub_v1.ApiHubAsyncClient() + client = backupdr_v1.BackupDRClient() # Initialize request argument(s) - external_api = apihub_v1.ExternalApi() - external_api.display_name = "display_name_value" - - request = apihub_v1.CreateExternalApiRequest( + request = backupdr_v1.CreateBackupVaultRequest( parent="parent_value", - external_api=external_api, + backup_vault_id="backup_vault_id_value", ) # Make the request - response = await client.create_external_api(request=request) + operation = client.create_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() # Handle the response print(response) -# [END apihub_v1_generated_ApiHub_CreateExternalApi_async] +# [END backupdr_v1_generated_BackupDR_CreateBackupVault_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_async.py new file mode 100644 index 000000000000..346d24a9543e --- /dev/null +++ 
b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_DeleteBackup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_delete_backup(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_DeleteBackup_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_association_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_association_async.py new file mode 100644 index 000000000000..21af239763d3 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_association_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackupPlanAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_DeleteBackupPlanAssociation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_delete_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupPlanAssociationRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_plan_association(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_DeleteBackupPlanAssociation_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_association_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_association_sync.py new file mode 100644 index 000000000000..18c4ca0cae3b --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_association_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackupPlanAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_DeleteBackupPlanAssociation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_delete_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupPlanAssociationRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_plan_association(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_DeleteBackupPlanAssociation_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_async.py new file mode 100644 index 000000000000..3423852e66cd --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackupPlan +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_DeleteBackupPlan_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_delete_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupPlanRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_plan(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_DeleteBackupPlan_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_sync.py new file mode 100644 index 000000000000..aaabe6e55265 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackupPlan +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_DeleteBackupPlan_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_delete_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupPlanRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_plan(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_DeleteBackupPlan_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_get_style_guide_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_sync.py similarity index 74% rename from packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_get_style_guide_async.py rename to packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_sync.py index d6a18ab4d88e..dee368f1cd32 100644 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_linting_service_get_style_guide_async.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_sync.py @@ -15,15 +15,15 @@ # # Generated code. DO NOT EDIT! # -# Snippet for GetStyleGuide +# Snippet for DeleteBackup # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. 
# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub +# python3 -m pip install google-cloud-backupdr -# [START apihub_v1_generated_LintingService_GetStyleGuide_async] +# [START backupdr_v1_generated_BackupDR_DeleteBackup_sync] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -31,22 +31,26 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 +from google.cloud import backupdr_v1 -async def sample_get_style_guide(): +def sample_delete_backup(): # Create a client - client = apihub_v1.LintingServiceAsyncClient() + client = backupdr_v1.BackupDRClient() # Initialize request argument(s) - request = apihub_v1.GetStyleGuideRequest( + request = backupdr_v1.DeleteBackupRequest( name="name_value", ) # Make the request - response = await client.get_style_guide(request=request) + operation = client.delete_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() # Handle the response print(response) -# [END apihub_v1_generated_LintingService_GetStyleGuide_async] +# [END backupdr_v1_generated_BackupDR_DeleteBackup_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_vault_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_vault_async.py new file mode 100644 index 000000000000..a70379011f44 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_vault_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this 
file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackupVault +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_DeleteBackupVault_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_delete_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupVaultRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_DeleteBackupVault_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_vault_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_vault_sync.py new file mode 100644 index 000000000000..2b824ef4088e --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_vault_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackupVault +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_DeleteBackupVault_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_delete_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.DeleteBackupVaultRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_DeleteBackupVault_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_async.py new file mode 100644 index 000000000000..ab7dc9c365b4 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchUsableBackupVaults +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_FetchUsableBackupVaults_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_fetch_usable_backup_vaults(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.FetchUsableBackupVaultsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.fetch_usable_backup_vaults(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END backupdr_v1_generated_BackupDR_FetchUsableBackupVaults_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_sync.py new file mode 100644 index 000000000000..0e4abb2342d5 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchUsableBackupVaults +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_FetchUsableBackupVaults_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_fetch_usable_backup_vaults(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.FetchUsableBackupVaultsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.fetch_usable_backup_vaults(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END backupdr_v1_generated_BackupDR_FetchUsableBackupVaults_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_attribute_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_async.py similarity index 77% rename from packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_attribute_async.py rename to packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_async.py index 8de6d7766e83..27f69f503b1b 100644 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_attribute_async.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_async.py @@ -15,15 +15,15 @@ # # Generated code. DO NOT EDIT! 
# -# Snippet for GetAttribute +# Snippet for GetBackup # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub +# python3 -m pip install google-cloud-backupdr -# [START apihub_v1_generated_ApiHub_GetAttribute_async] +# [START backupdr_v1_generated_BackupDR_GetBackup_async] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -31,22 +31,22 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 +from google.cloud import backupdr_v1 -async def sample_get_attribute(): +async def sample_get_backup(): # Create a client - client = apihub_v1.ApiHubAsyncClient() + client = backupdr_v1.BackupDRAsyncClient() # Initialize request argument(s) - request = apihub_v1.GetAttributeRequest( + request = backupdr_v1.GetBackupRequest( name="name_value", ) # Make the request - response = await client.get_attribute(request=request) + response = await client.get_backup(request=request) # Handle the response print(response) -# [END apihub_v1_generated_ApiHub_GetAttribute_async] +# [END backupdr_v1_generated_BackupDR_GetBackup_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_association_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_association_async.py new file mode 100644 index 000000000000..666e503e039c --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_association_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# 
Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackupPlanAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_GetBackupPlanAssociation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_get_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupPlanAssociationRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup_plan_association(request=request) + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_GetBackupPlanAssociation_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_association_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_association_sync.py new file mode 100644 index 000000000000..f16d4b5dcdc6 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_association_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackupPlanAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_GetBackupPlanAssociation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_get_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupPlanAssociationRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup_plan_association(request=request) + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_GetBackupPlanAssociation_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_async.py new file mode 100644 index 000000000000..bbca5985c4d3 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackupPlan +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_GetBackupPlan_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_get_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupPlanRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup_plan(request=request) + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_GetBackupPlan_async] diff --git a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_contact_service_get_contact_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_sync.py similarity index 77% rename from packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_contact_service_get_contact_sync.py rename to packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_sync.py index d5f793f8c3e8..3e6f35ccdc90 100644 --- a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_contact_service_get_contact_sync.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_sync.py @@ -15,15 +15,15 @@ # # Generated code. DO NOT EDIT! # -# Snippet for GetContact +# Snippet for GetBackupPlan # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. 
# To install the latest published package dependency, execute the following: -# python3 -m pip install google-ads-admanager +# python3 -m pip install google-cloud-backupdr -# [START admanager_v1_generated_ContactService_GetContact_sync] +# [START backupdr_v1_generated_BackupDR_GetBackupPlan_sync] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -31,22 +31,22 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.ads import admanager_v1 +from google.cloud import backupdr_v1 -def sample_get_contact(): +def sample_get_backup_plan(): # Create a client - client = admanager_v1.ContactServiceClient() + client = backupdr_v1.BackupDRClient() # Initialize request argument(s) - request = admanager_v1.GetContactRequest( + request = backupdr_v1.GetBackupPlanRequest( name="name_value", ) # Make the request - response = client.get_contact(request=request) + response = client.get_backup_plan(request=request) # Handle the response print(response) -# [END admanager_v1_generated_ContactService_GetContact_sync] +# [END backupdr_v1_generated_BackupDR_GetBackupPlan_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_api_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_sync.py similarity index 78% rename from packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_api_async.py rename to packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_sync.py index d9a93e6ec082..064cbac8920e 100644 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_get_api_async.py +++ 
b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_sync.py @@ -15,15 +15,15 @@ # # Generated code. DO NOT EDIT! # -# Snippet for GetApi +# Snippet for GetBackup # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub +# python3 -m pip install google-cloud-backupdr -# [START apihub_v1_generated_ApiHub_GetApi_async] +# [START backupdr_v1_generated_BackupDR_GetBackup_sync] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -31,22 +31,22 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 +from google.cloud import backupdr_v1 -async def sample_get_api(): +def sample_get_backup(): # Create a client - client = apihub_v1.ApiHubAsyncClient() + client = backupdr_v1.BackupDRClient() # Initialize request argument(s) - request = apihub_v1.GetApiRequest( + request = backupdr_v1.GetBackupRequest( name="name_value", ) # Make the request - response = await client.get_api(request=request) + response = client.get_backup(request=request) # Handle the response print(response) -# [END apihub_v1_generated_ApiHub_GetApi_async] +# [END backupdr_v1_generated_BackupDR_GetBackup_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_vault_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_vault_async.py new file mode 100644 index 000000000000..95d30ed5bf46 --- /dev/null +++ 
b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_vault_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackupVault +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_GetBackupVault_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_get_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupVaultRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup_vault(request=request) + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_GetBackupVault_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_vault_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_vault_sync.py new file mode 100644 index 000000000000..814ccccaf4a0 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_vault_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackupVault +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_GetBackupVault_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_get_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupVaultRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup_vault(request=request) + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_GetBackupVault_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_async.py new file mode 100644 index 000000000000..864ee90db114 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDataSource +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_GetDataSource_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_get_data_source(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.GetDataSourceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_data_source(request=request) + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_GetDataSource_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_sync.py new file mode 100644 index 000000000000..95f18218de42 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDataSource +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_GetDataSource_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_get_data_source(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.GetDataSourceRequest( + name="name_value", + ) + + # Make the request + response = client.get_data_source(request=request) + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_GetDataSource_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_associations_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_associations_async.py new file mode 100644 index 000000000000..e6cfd3cc039c --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_associations_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackupPlanAssociations +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_ListBackupPlanAssociations_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_list_backup_plan_associations(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupPlanAssociationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_plan_associations(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END backupdr_v1_generated_BackupDR_ListBackupPlanAssociations_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_associations_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_associations_sync.py new file mode 100644 index 000000000000..39b135ce9944 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_associations_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackupPlanAssociations +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_ListBackupPlanAssociations_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_list_backup_plan_associations(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupPlanAssociationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_plan_associations(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END backupdr_v1_generated_BackupDR_ListBackupPlanAssociations_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_deployments_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plans_async.py similarity index 77% rename from packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_deployments_async.py rename to packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plans_async.py index 7d96c577933f..f09593b5796b 100644 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_deployments_async.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plans_async.py @@ -15,15 +15,15 @@ # # Generated code. DO NOT EDIT! # -# Snippet for ListDeployments +# Snippet for ListBackupPlans # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. 
# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub +# python3 -m pip install google-cloud-backupdr -# [START apihub_v1_generated_ApiHub_ListDeployments_async] +# [START backupdr_v1_generated_BackupDR_ListBackupPlans_async] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -31,23 +31,23 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 +from google.cloud import backupdr_v1 -async def sample_list_deployments(): +async def sample_list_backup_plans(): # Create a client - client = apihub_v1.ApiHubAsyncClient() + client = backupdr_v1.BackupDRAsyncClient() # Initialize request argument(s) - request = apihub_v1.ListDeploymentsRequest( + request = backupdr_v1.ListBackupPlansRequest( parent="parent_value", ) # Make the request - page_result = client.list_deployments(request=request) + page_result = client.list_backup_plans(request=request) # Handle the response async for response in page_result: print(response) -# [END apihub_v1_generated_ApiHub_ListDeployments_async] +# [END backupdr_v1_generated_BackupDR_ListBackupPlans_async] diff --git a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_label_service_list_labels_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plans_sync.py similarity index 77% rename from packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_label_service_list_labels_sync.py rename to packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plans_sync.py index bc207268f8da..ab2cab9b1701 100644 --- 
a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_label_service_list_labels_sync.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plans_sync.py @@ -15,15 +15,15 @@ # # Generated code. DO NOT EDIT! # -# Snippet for ListLabels +# Snippet for ListBackupPlans # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: -# python3 -m pip install google-ads-admanager +# python3 -m pip install google-cloud-backupdr -# [START admanager_v1_generated_LabelService_ListLabels_sync] +# [START backupdr_v1_generated_BackupDR_ListBackupPlans_sync] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -31,23 +31,23 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.ads import admanager_v1 +from google.cloud import backupdr_v1 -def sample_list_labels(): +def sample_list_backup_plans(): # Create a client - client = admanager_v1.LabelServiceClient() + client = backupdr_v1.BackupDRClient() # Initialize request argument(s) - request = admanager_v1.ListLabelsRequest( + request = backupdr_v1.ListBackupPlansRequest( parent="parent_value", ) # Make the request - page_result = client.list_labels(request=request) + page_result = client.list_backup_plans(request=request) # Handle the response for response in page_result: print(response) -# [END admanager_v1_generated_LabelService_ListLabels_sync] +# [END backupdr_v1_generated_BackupDR_ListBackupPlans_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_attributes_async.py 
b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_vaults_async.py similarity index 76% rename from packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_attributes_async.py rename to packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_vaults_async.py index 9dd769c3831b..675c345b810c 100644 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_attributes_async.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_vaults_async.py @@ -15,15 +15,15 @@ # # Generated code. DO NOT EDIT! # -# Snippet for ListAttributes +# Snippet for ListBackupVaults # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub +# python3 -m pip install google-cloud-backupdr -# [START apihub_v1_generated_ApiHub_ListAttributes_async] +# [START backupdr_v1_generated_BackupDR_ListBackupVaults_async] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -31,23 +31,23 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 +from google.cloud import backupdr_v1 -async def sample_list_attributes(): +async def sample_list_backup_vaults(): # Create a client - client = apihub_v1.ApiHubAsyncClient() + client = backupdr_v1.BackupDRAsyncClient() # Initialize request argument(s) - request = apihub_v1.ListAttributesRequest( + request = backupdr_v1.ListBackupVaultsRequest( parent="parent_value", ) # Make the request - page_result = client.list_attributes(request=request) + page_result = client.list_backup_vaults(request=request) # Handle the response async for response in page_result: print(response) -# [END apihub_v1_generated_ApiHub_ListAttributes_async] +# [END backupdr_v1_generated_BackupDR_ListBackupVaults_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_vaults_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_vaults_sync.py new file mode 100644 index 000000000000..27b1faa5debb --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_vaults_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackupVaults +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_ListBackupVaults_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_list_backup_vaults(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupVaultsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_vaults(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END backupdr_v1_generated_BackupDR_ListBackupVaults_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_specs_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backups_async.py similarity index 78% rename from packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_specs_async.py rename to packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backups_async.py index 33e365b5ebaa..da6366f3b095 100644 --- 
a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_specs_async.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backups_async.py @@ -15,15 +15,15 @@ # # Generated code. DO NOT EDIT! # -# Snippet for ListSpecs +# Snippet for ListBackups # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub +# python3 -m pip install google-cloud-backupdr -# [START apihub_v1_generated_ApiHub_ListSpecs_async] +# [START backupdr_v1_generated_BackupDR_ListBackups_async] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -31,23 +31,23 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 +from google.cloud import backupdr_v1 -async def sample_list_specs(): +async def sample_list_backups(): # Create a client - client = apihub_v1.ApiHubAsyncClient() + client = backupdr_v1.BackupDRAsyncClient() # Initialize request argument(s) - request = apihub_v1.ListSpecsRequest( + request = backupdr_v1.ListBackupsRequest( parent="parent_value", ) # Make the request - page_result = client.list_specs(request=request) + page_result = client.list_backups(request=request) # Handle the response async for response in page_result: print(response) -# [END apihub_v1_generated_ApiHub_ListSpecs_async] +# [END backupdr_v1_generated_BackupDR_ListBackups_async] diff --git a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_team_service_list_teams_sync.py 
b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backups_sync.py similarity index 78% rename from packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_team_service_list_teams_sync.py rename to packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backups_sync.py index 947a825e3a3d..18387f7371a5 100644 --- a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_team_service_list_teams_sync.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backups_sync.py @@ -15,15 +15,15 @@ # # Generated code. DO NOT EDIT! # -# Snippet for ListTeams +# Snippet for ListBackups # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: -# python3 -m pip install google-ads-admanager +# python3 -m pip install google-cloud-backupdr -# [START admanager_v1_generated_TeamService_ListTeams_sync] +# [START backupdr_v1_generated_BackupDR_ListBackups_sync] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -31,23 +31,23 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.ads import admanager_v1 +from google.cloud import backupdr_v1 -def sample_list_teams(): +def sample_list_backups(): # Create a client - client = admanager_v1.TeamServiceClient() + client = backupdr_v1.BackupDRClient() # Initialize request argument(s) - request = admanager_v1.ListTeamsRequest( + request = backupdr_v1.ListBackupsRequest( parent="parent_value", ) # Make the request - page_result = client.list_teams(request=request) + page_result = client.list_backups(request=request) # Handle the response for response in page_result: print(response) -# [END admanager_v1_generated_TeamService_ListTeams_sync] +# [END backupdr_v1_generated_BackupDR_ListBackups_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_api_operations_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_sources_async.py similarity index 77% rename from packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_api_operations_async.py rename to packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_sources_async.py index 341caf033a8a..f5cb4d5a4477 100644 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_list_api_operations_async.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_sources_async.py @@ -15,15 +15,15 @@ # # Generated code. DO NOT EDIT! # -# Snippet for ListApiOperations +# Snippet for ListDataSources # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. 
# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub +# python3 -m pip install google-cloud-backupdr -# [START apihub_v1_generated_ApiHub_ListApiOperations_async] +# [START backupdr_v1_generated_BackupDR_ListDataSources_async] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -31,23 +31,23 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 +from google.cloud import backupdr_v1 -async def sample_list_api_operations(): +async def sample_list_data_sources(): # Create a client - client = apihub_v1.ApiHubAsyncClient() + client = backupdr_v1.BackupDRAsyncClient() # Initialize request argument(s) - request = apihub_v1.ListApiOperationsRequest( + request = backupdr_v1.ListDataSourcesRequest( parent="parent_value", ) # Make the request - page_result = client.list_api_operations(request=request) + page_result = client.list_data_sources(request=request) # Handle the response async for response in page_result: print(response) -# [END apihub_v1_generated_ApiHub_ListApiOperations_async] +# [END backupdr_v1_generated_BackupDR_ListDataSources_async] diff --git a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_contact_service_list_contacts_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_sources_sync.py similarity index 77% rename from packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_contact_service_list_contacts_sync.py rename to packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_sources_sync.py index 6f17b86732e5..36680bf32e15 100644 --- 
a/packages/google-ads-admanager/samples/generated_samples/admanager_v1_generated_contact_service_list_contacts_sync.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_sources_sync.py @@ -15,15 +15,15 @@ # # Generated code. DO NOT EDIT! # -# Snippet for ListContacts +# Snippet for ListDataSources # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: -# python3 -m pip install google-ads-admanager +# python3 -m pip install google-cloud-backupdr -# [START admanager_v1_generated_ContactService_ListContacts_sync] +# [START backupdr_v1_generated_BackupDR_ListDataSources_sync] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -31,23 +31,23 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.ads import admanager_v1 +from google.cloud import backupdr_v1 -def sample_list_contacts(): +def sample_list_data_sources(): # Create a client - client = admanager_v1.ContactServiceClient() + client = backupdr_v1.BackupDRClient() # Initialize request argument(s) - request = admanager_v1.ListContactsRequest( + request = backupdr_v1.ListDataSourcesRequest( parent="parent_value", ) # Make the request - page_result = client.list_contacts(request=request) + page_result = client.list_data_sources(request=request) # Handle the response for response in page_result: print(response) -# [END admanager_v1_generated_ContactService_ListContacts_sync] +# [END backupdr_v1_generated_BackupDR_ListDataSources_sync] diff --git 
a/packages/google-cloud-documentai/samples/generated_samples/documentai_v1beta2_generated_document_understanding_service_batch_process_documents_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_restore_backup_async.py similarity index 65% rename from packages/google-cloud-documentai/samples/generated_samples/documentai_v1beta2_generated_document_understanding_service_batch_process_documents_async.py rename to packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_restore_backup_async.py index 460ca2fce376..9bdfab3c21bc 100644 --- a/packages/google-cloud-documentai/samples/generated_samples/documentai_v1beta2_generated_document_understanding_service_batch_process_documents_async.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_restore_backup_async.py @@ -15,15 +15,15 @@ # # Generated code. DO NOT EDIT! # -# Snippet for BatchProcessDocuments +# Snippet for RestoreBackup # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-documentai +# python3 -m pip install google-cloud-backupdr -# [START documentai_v1beta2_generated_DocumentUnderstandingService_BatchProcessDocuments_async] +# [START backupdr_v1_generated_BackupDR_RestoreBackup_async] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -31,24 +31,25 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import documentai_v1beta2 +from google.cloud import backupdr_v1 -async def sample_batch_process_documents(): +async def sample_restore_backup(): # Create a client - client = documentai_v1beta2.DocumentUnderstandingServiceAsyncClient() + client = backupdr_v1.BackupDRAsyncClient() # Initialize request argument(s) - requests = documentai_v1beta2.ProcessDocumentRequest() - requests.input_config.gcs_source.uri = "uri_value" - requests.input_config.mime_type = "mime_type_value" + compute_instance_target_environment = backupdr_v1.ComputeInstanceTargetEnvironment() + compute_instance_target_environment.project = "project_value" + compute_instance_target_environment.zone = "zone_value" - request = documentai_v1beta2.BatchProcessDocumentsRequest( - requests=requests, + request = backupdr_v1.RestoreBackupRequest( + compute_instance_target_environment=compute_instance_target_environment, + name="name_value", ) # Make the request - operation = client.batch_process_documents(request=request) + operation = client.restore_backup(request=request) print("Waiting for operation to complete...") @@ -57,4 +58,4 @@ async def sample_batch_process_documents(): # Handle the response print(response) -# [END documentai_v1beta2_generated_DocumentUnderstandingService_BatchProcessDocuments_async] +# [END backupdr_v1_generated_BackupDR_RestoreBackup_async] diff --git a/packages/google-cloud-documentai/samples/generated_samples/documentai_v1beta2_generated_document_understanding_service_batch_process_documents_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_restore_backup_sync.py similarity index 66% rename from 
packages/google-cloud-documentai/samples/generated_samples/documentai_v1beta2_generated_document_understanding_service_batch_process_documents_sync.py rename to packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_restore_backup_sync.py index b381b928b678..6b503fb4a546 100644 --- a/packages/google-cloud-documentai/samples/generated_samples/documentai_v1beta2_generated_document_understanding_service_batch_process_documents_sync.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_restore_backup_sync.py @@ -15,15 +15,15 @@ # # Generated code. DO NOT EDIT! # -# Snippet for BatchProcessDocuments +# Snippet for RestoreBackup # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-documentai +# python3 -m pip install google-cloud-backupdr -# [START documentai_v1beta2_generated_DocumentUnderstandingService_BatchProcessDocuments_sync] +# [START backupdr_v1_generated_BackupDR_RestoreBackup_sync] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -31,24 +31,25 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import documentai_v1beta2 +from google.cloud import backupdr_v1 -def sample_batch_process_documents(): +def sample_restore_backup(): # Create a client - client = documentai_v1beta2.DocumentUnderstandingServiceClient() + client = backupdr_v1.BackupDRClient() # Initialize request argument(s) - requests = documentai_v1beta2.ProcessDocumentRequest() - requests.input_config.gcs_source.uri = "uri_value" - requests.input_config.mime_type = "mime_type_value" + compute_instance_target_environment = backupdr_v1.ComputeInstanceTargetEnvironment() + compute_instance_target_environment.project = "project_value" + compute_instance_target_environment.zone = "zone_value" - request = documentai_v1beta2.BatchProcessDocumentsRequest( - requests=requests, + request = backupdr_v1.RestoreBackupRequest( + compute_instance_target_environment=compute_instance_target_environment, + name="name_value", ) # Make the request - operation = client.batch_process_documents(request=request) + operation = client.restore_backup(request=request) print("Waiting for operation to complete...") @@ -57,4 +58,4 @@ def sample_batch_process_documents(): # Handle the response print(response) -# [END documentai_v1beta2_generated_DocumentUnderstandingService_BatchProcessDocuments_sync] +# [END backupdr_v1_generated_BackupDR_RestoreBackup_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_trigger_backup_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_trigger_backup_async.py new file mode 100644 index 000000000000..6c6c641d54ee --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_trigger_backup_async.py @@ 
-0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TriggerBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_TriggerBackup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_trigger_backup(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.TriggerBackupRequest( + name="name_value", + rule_id="rule_id_value", + ) + + # Make the request + operation = client.trigger_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_TriggerBackup_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_trigger_backup_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_trigger_backup_sync.py new file mode 100644 index 000000000000..359727f2dd1c --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_trigger_backup_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TriggerBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_TriggerBackup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_trigger_backup(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.TriggerBackupRequest( + name="name_value", + rule_id="rule_id_value", + ) + + # Make the request + operation = client.trigger_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_TriggerBackup_sync] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_api_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_async.py similarity index 73% rename from packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_api_async.py rename to packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_async.py index 433fc9066d3e..a1a2fcc0ce51 100644 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_create_api_async.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_async.py @@ -15,15 +15,15 @@ # # Generated code. DO NOT EDIT! 
# -# Snippet for CreateApi +# Snippet for UpdateBackup # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub +# python3 -m pip install google-cloud-backupdr -# [START apihub_v1_generated_ApiHub_CreateApi_async] +# [START backupdr_v1_generated_BackupDR_UpdateBackup_async] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -31,26 +31,25 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 +from google.cloud import backupdr_v1 -async def sample_create_api(): +async def sample_update_backup(): # Create a client - client = apihub_v1.ApiHubAsyncClient() + client = backupdr_v1.BackupDRAsyncClient() # Initialize request argument(s) - api = apihub_v1.Api() - api.display_name = "display_name_value" - - request = apihub_v1.CreateApiRequest( - parent="parent_value", - api=api, + request = backupdr_v1.UpdateBackupRequest( ) # Make the request - response = await client.create_api(request=request) + operation = client.update_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() # Handle the response print(response) -# [END apihub_v1_generated_ApiHub_CreateApi_async] +# [END backupdr_v1_generated_BackupDR_UpdateBackup_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_api_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_sync.py similarity index 74% rename from 
packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_api_async.py rename to packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_sync.py index 4ba551de8127..9ea7e26404d0 100644 --- a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_api_async.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_sync.py @@ -15,15 +15,15 @@ # # Generated code. DO NOT EDIT! # -# Snippet for UpdateApi +# Snippet for UpdateBackup # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub +# python3 -m pip install google-cloud-backupdr -# [START apihub_v1_generated_ApiHub_UpdateApi_async] +# [START backupdr_v1_generated_BackupDR_UpdateBackup_sync] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -31,25 +31,25 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 +from google.cloud import backupdr_v1 -async def sample_update_api(): +def sample_update_backup(): # Create a client - client = apihub_v1.ApiHubAsyncClient() + client = backupdr_v1.BackupDRClient() # Initialize request argument(s) - api = apihub_v1.Api() - api.display_name = "display_name_value" - - request = apihub_v1.UpdateApiRequest( - api=api, + request = backupdr_v1.UpdateBackupRequest( ) # Make the request - response = await client.update_api(request=request) + operation = client.update_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() # Handle the response print(response) -# [END apihub_v1_generated_ApiHub_UpdateApi_async] +# [END backupdr_v1_generated_BackupDR_UpdateBackup_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_vault_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_vault_async.py new file mode 100644 index 000000000000..386f2ca872d3 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_vault_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBackupVault +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_UpdateBackupVault_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_update_backup_vault(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.UpdateBackupVaultRequest( + ) + + # Make the request + operation = client.update_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_UpdateBackupVault_async] diff --git a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_version_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_vault_sync.py similarity index 72% rename from packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_version_async.py rename to packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_vault_sync.py index 37369a1057bc..ab3690e1df33 100644 --- 
a/packages/google-cloud-apihub/samples/generated_samples/apihub_v1_generated_api_hub_update_version_async.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_vault_sync.py @@ -15,15 +15,15 @@ # # Generated code. DO NOT EDIT! # -# Snippet for UpdateVersion +# Snippet for UpdateBackupVault # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-apihub +# python3 -m pip install google-cloud-backupdr -# [START apihub_v1_generated_ApiHub_UpdateVersion_async] +# [START backupdr_v1_generated_BackupDR_UpdateBackupVault_sync] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -31,25 +31,25 @@ # - It may require specifying regional endpoints when creating the service # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import apihub_v1 +from google.cloud import backupdr_v1 -async def sample_update_version(): +def sample_update_backup_vault(): # Create a client - client = apihub_v1.ApiHubAsyncClient() + client = backupdr_v1.BackupDRClient() # Initialize request argument(s) - version = apihub_v1.Version() - version.display_name = "display_name_value" - - request = apihub_v1.UpdateVersionRequest( - version=version, + request = backupdr_v1.UpdateBackupVaultRequest( ) # Make the request - response = await client.update_version(request=request) + operation = client.update_backup_vault(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() # Handle the response print(response) -# [END apihub_v1_generated_ApiHub_UpdateVersion_async] +# [END backupdr_v1_generated_BackupDR_UpdateBackupVault_sync] diff --git 
a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_data_source_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_data_source_async.py new file mode 100644 index 000000000000..986de214c53d --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_data_source_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDataSource +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_UpdateDataSource_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_update_data_source(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.UpdateDataSourceRequest( + ) + + # Make the request + operation = client.update_data_source(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_UpdateDataSource_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_data_source_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_data_source_sync.py new file mode 100644 index 000000000000..d20aa5d93848 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_data_source_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDataSource +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_UpdateDataSource_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_update_data_source(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.UpdateDataSourceRequest( + ) + + # Make the request + operation = client.update_data_source(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_UpdateDataSource_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json b/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json index ff879435143f..dc01534e8660 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json +++ b/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json @@ -8,9 +8,540 @@ ], "language": "PYTHON", "name": "google-cloud-backupdr", - "version": "0.1.0" + "version": "0.1.4" }, "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.create_backup_plan_association", + "method": { + 
"fullName": "google.cloud.backupdr.v1.BackupDR.CreateBackupPlanAssociation", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "CreateBackupPlanAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.CreateBackupPlanAssociationRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "backup_plan_association", + "type": "google.cloud.backupdr_v1.types.BackupPlanAssociation" + }, + { + "name": "backup_plan_association_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_backup_plan_association" + }, + "description": "Sample for CreateBackupPlanAssociation", + "file": "backupdr_v1_generated_backup_dr_create_backup_plan_association_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_CreateBackupPlanAssociation_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_create_backup_plan_association_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.create_backup_plan_association", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.CreateBackupPlanAssociation", + 
"service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "CreateBackupPlanAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.CreateBackupPlanAssociationRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "backup_plan_association", + "type": "google.cloud.backupdr_v1.types.BackupPlanAssociation" + }, + { + "name": "backup_plan_association_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_backup_plan_association" + }, + "description": "Sample for CreateBackupPlanAssociation", + "file": "backupdr_v1_generated_backup_dr_create_backup_plan_association_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_CreateBackupPlanAssociation_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_create_backup_plan_association_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.create_backup_plan", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.CreateBackupPlan", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + 
}, + "shortName": "CreateBackupPlan" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.CreateBackupPlanRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "backup_plan", + "type": "google.cloud.backupdr_v1.types.BackupPlan" + }, + { + "name": "backup_plan_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_backup_plan" + }, + "description": "Sample for CreateBackupPlan", + "file": "backupdr_v1_generated_backup_dr_create_backup_plan_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_CreateBackupPlan_async", + "segments": [ + { + "end": 67, + "start": 27, + "type": "FULL" + }, + { + "end": 67, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 57, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 64, + "start": 58, + "type": "REQUEST_EXECUTION" + }, + { + "end": 68, + "start": 65, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_create_backup_plan_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.create_backup_plan", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.CreateBackupPlan", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "CreateBackupPlan" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.CreateBackupPlanRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": 
"backup_plan", + "type": "google.cloud.backupdr_v1.types.BackupPlan" + }, + { + "name": "backup_plan_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_backup_plan" + }, + "description": "Sample for CreateBackupPlan", + "file": "backupdr_v1_generated_backup_dr_create_backup_plan_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_CreateBackupPlan_sync", + "segments": [ + { + "end": 67, + "start": 27, + "type": "FULL" + }, + { + "end": 67, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 57, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 64, + "start": 58, + "type": "REQUEST_EXECUTION" + }, + { + "end": 68, + "start": 65, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_create_backup_plan_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.create_backup_vault", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.CreateBackupVault", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "CreateBackupVault" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.CreateBackupVaultRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "backup_vault", + "type": "google.cloud.backupdr_v1.types.BackupVault" + }, + { + "name": "backup_vault_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + 
"name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_backup_vault" + }, + "description": "Sample for CreateBackupVault", + "file": "backupdr_v1_generated_backup_dr_create_backup_vault_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_CreateBackupVault_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_create_backup_vault_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.create_backup_vault", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.CreateBackupVault", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "CreateBackupVault" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.CreateBackupVaultRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "backup_vault", + "type": "google.cloud.backupdr_v1.types.BackupVault" + }, + { + "name": "backup_vault_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_backup_vault" + }, + 
"description": "Sample for CreateBackupVault", + "file": "backupdr_v1_generated_backup_dr_create_backup_vault_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_CreateBackupVault_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_create_backup_vault_sync.py" + }, { "canonical": true, "clientMethod": { @@ -21,28 +552,3260 @@ }, "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.create_management_server", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.CreateManagementServer", + "fullName": "google.cloud.backupdr.v1.BackupDR.CreateManagementServer", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "CreateManagementServer" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.CreateManagementServerRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "management_server", + "type": "google.cloud.backupdr_v1.types.ManagementServer" + }, + { + "name": "management_server_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_management_server" + }, + "description": "Sample for CreateManagementServer", + "file": "backupdr_v1_generated_backup_dr_create_management_server_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + 
"regionTag": "backupdr_v1_generated_BackupDR_CreateManagementServer_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_create_management_server_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.create_management_server", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.CreateManagementServer", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "CreateManagementServer" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.CreateManagementServerRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "management_server", + "type": "google.cloud.backupdr_v1.types.ManagementServer" + }, + { + "name": "management_server_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_management_server" + }, + "description": "Sample for CreateManagementServer", + "file": "backupdr_v1_generated_backup_dr_create_management_server_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_CreateManagementServer_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + 
"start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_create_management_server_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.delete_backup_plan_association", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteBackupPlanAssociation", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "DeleteBackupPlanAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.DeleteBackupPlanAssociationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_backup_plan_association" + }, + "description": "Sample for DeleteBackupPlanAssociation", + "file": "backupdr_v1_generated_backup_dr_delete_backup_plan_association_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_DeleteBackupPlanAssociation_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" 
+ }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_delete_backup_plan_association_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.delete_backup_plan_association", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteBackupPlanAssociation", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "DeleteBackupPlanAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.DeleteBackupPlanAssociationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_backup_plan_association" + }, + "description": "Sample for DeleteBackupPlanAssociation", + "file": "backupdr_v1_generated_backup_dr_delete_backup_plan_association_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_DeleteBackupPlanAssociation_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_delete_backup_plan_association_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.delete_backup_plan", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteBackupPlan", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "DeleteBackupPlan" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.DeleteBackupPlanRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_backup_plan" + }, + "description": "Sample for DeleteBackupPlan", + "file": "backupdr_v1_generated_backup_dr_delete_backup_plan_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_DeleteBackupPlan_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_delete_backup_plan_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.delete_backup_plan", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteBackupPlan", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": 
"DeleteBackupPlan" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.DeleteBackupPlanRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_backup_plan" + }, + "description": "Sample for DeleteBackupPlan", + "file": "backupdr_v1_generated_backup_dr_delete_backup_plan_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_DeleteBackupPlan_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_delete_backup_plan_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.delete_backup_vault", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteBackupVault", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "DeleteBackupVault" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.DeleteBackupVaultRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_backup_vault" + }, + "description": "Sample for DeleteBackupVault", + "file": "backupdr_v1_generated_backup_dr_delete_backup_vault_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_DeleteBackupVault_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_delete_backup_vault_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.delete_backup_vault", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteBackupVault", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "DeleteBackupVault" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.DeleteBackupVaultRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_backup_vault" + }, + "description": "Sample for DeleteBackupVault", + "file": "backupdr_v1_generated_backup_dr_delete_backup_vault_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"backupdr_v1_generated_BackupDR_DeleteBackupVault_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_delete_backup_vault_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.delete_backup", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteBackup", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "DeleteBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.DeleteBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_backup" + }, + "description": "Sample for DeleteBackup", + "file": "backupdr_v1_generated_backup_dr_delete_backup_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_DeleteBackup_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_delete_backup_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.delete_backup", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteBackup", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "DeleteBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.DeleteBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_backup" + }, + "description": "Sample for DeleteBackup", + "file": "backupdr_v1_generated_backup_dr_delete_backup_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_DeleteBackup_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_delete_backup_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": 
"google.cloud.backupdr_v1.BackupDRAsyncClient.delete_management_server", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteManagementServer", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "DeleteManagementServer" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.DeleteManagementServerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_management_server" + }, + "description": "Sample for DeleteManagementServer", + "file": "backupdr_v1_generated_backup_dr_delete_management_server_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_DeleteManagementServer_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_delete_management_server_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.delete_management_server", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteManagementServer", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "DeleteManagementServer" + }, + 
"parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.DeleteManagementServerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_management_server" + }, + "description": "Sample for DeleteManagementServer", + "file": "backupdr_v1_generated_backup_dr_delete_management_server_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_DeleteManagementServer_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_delete_management_server_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.fetch_usable_backup_vaults", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.FetchUsableBackupVaults", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "FetchUsableBackupVaults" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.FetchUsableBackupVaultsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + 
"name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.FetchUsableBackupVaultsAsyncPager", + "shortName": "fetch_usable_backup_vaults" + }, + "description": "Sample for FetchUsableBackupVaults", + "file": "backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_FetchUsableBackupVaults_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.fetch_usable_backup_vaults", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.FetchUsableBackupVaults", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "FetchUsableBackupVaults" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.FetchUsableBackupVaultsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.FetchUsableBackupVaultsPager", + "shortName": "fetch_usable_backup_vaults" + }, + "description": "Sample for 
FetchUsableBackupVaults", + "file": "backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_FetchUsableBackupVaults_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.get_backup_plan_association", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackupPlanAssociation", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetBackupPlanAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.GetBackupPlanAssociationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.BackupPlanAssociation", + "shortName": "get_backup_plan_association" + }, + "description": "Sample for GetBackupPlanAssociation", + "file": "backupdr_v1_generated_backup_dr_get_backup_plan_association_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetBackupPlanAssociation_async", + "segments": [ + 
{ + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_get_backup_plan_association_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.get_backup_plan_association", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackupPlanAssociation", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetBackupPlanAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.GetBackupPlanAssociationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.BackupPlanAssociation", + "shortName": "get_backup_plan_association" + }, + "description": "Sample for GetBackupPlanAssociation", + "file": "backupdr_v1_generated_backup_dr_get_backup_plan_association_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetBackupPlanAssociation_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + 
"type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_get_backup_plan_association_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.get_backup_plan", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackupPlan", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetBackupPlan" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.GetBackupPlanRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.BackupPlan", + "shortName": "get_backup_plan" + }, + "description": "Sample for GetBackupPlan", + "file": "backupdr_v1_generated_backup_dr_get_backup_plan_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetBackupPlan_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_get_backup_plan_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": 
"google.cloud.backupdr_v1.BackupDRClient.get_backup_plan", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackupPlan", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetBackupPlan" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.GetBackupPlanRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.BackupPlan", + "shortName": "get_backup_plan" + }, + "description": "Sample for GetBackupPlan", + "file": "backupdr_v1_generated_backup_dr_get_backup_plan_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetBackupPlan_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_get_backup_plan_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.get_backup_vault", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackupVault", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetBackupVault" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.backupdr_v1.types.GetBackupVaultRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.BackupVault", + "shortName": "get_backup_vault" + }, + "description": "Sample for GetBackupVault", + "file": "backupdr_v1_generated_backup_dr_get_backup_vault_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetBackupVault_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_get_backup_vault_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.get_backup_vault", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackupVault", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetBackupVault" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.GetBackupVaultRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.BackupVault", + "shortName": "get_backup_vault" + 
}, + "description": "Sample for GetBackupVault", + "file": "backupdr_v1_generated_backup_dr_get_backup_vault_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetBackupVault_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_get_backup_vault_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.get_backup", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackup", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.GetBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.Backup", + "shortName": "get_backup" + }, + "description": "Sample for GetBackup", + "file": "backupdr_v1_generated_backup_dr_get_backup_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetBackup_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + 
"type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_get_backup_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.get_backup", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackup", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.GetBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.Backup", + "shortName": "get_backup" + }, + "description": "Sample for GetBackup", + "file": "backupdr_v1_generated_backup_dr_get_backup_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetBackup_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_get_backup_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.get_data_source", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetDataSource", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetDataSource" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.GetDataSourceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.DataSource", + "shortName": "get_data_source" + }, + "description": "Sample for GetDataSource", + "file": "backupdr_v1_generated_backup_dr_get_data_source_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetDataSource_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_get_data_source_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.get_data_source", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetDataSource", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetDataSource" + }, + "parameters": [ + 
{ + "name": "request", + "type": "google.cloud.backupdr_v1.types.GetDataSourceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.DataSource", + "shortName": "get_data_source" + }, + "description": "Sample for GetDataSource", + "file": "backupdr_v1_generated_backup_dr_get_data_source_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetDataSource_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_get_data_source_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.get_management_server", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetManagementServer", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetManagementServer" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.GetManagementServerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.backupdr_v1.types.ManagementServer", + "shortName": "get_management_server" + }, + "description": "Sample for GetManagementServer", + "file": "backupdr_v1_generated_backup_dr_get_management_server_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_GetManagementServer_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_get_management_server_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.get_management_server", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.GetManagementServer", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "GetManagementServer" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.GetManagementServerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.types.ManagementServer", + "shortName": "get_management_server" + }, + "description": "Sample for GetManagementServer", + "file": "backupdr_v1_generated_backup_dr_get_management_server_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"backupdr_v1_generated_BackupDR_GetManagementServer_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_get_management_server_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_backup_plan_associations", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackupPlanAssociations", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListBackupPlanAssociations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListBackupPlanAssociationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlanAssociationsAsyncPager", + "shortName": "list_backup_plan_associations" + }, + "description": "Sample for ListBackupPlanAssociations", + "file": "backupdr_v1_generated_backup_dr_list_backup_plan_associations_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListBackupPlanAssociations_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + 
"start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_backup_plan_associations_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.list_backup_plan_associations", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackupPlanAssociations", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListBackupPlanAssociations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListBackupPlanAssociationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlanAssociationsPager", + "shortName": "list_backup_plan_associations" + }, + "description": "Sample for ListBackupPlanAssociations", + "file": "backupdr_v1_generated_backup_dr_list_backup_plan_associations_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListBackupPlanAssociations_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_backup_plan_associations_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_backup_plans", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackupPlans", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListBackupPlans" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListBackupPlansRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlansAsyncPager", + "shortName": "list_backup_plans" + }, + "description": "Sample for ListBackupPlans", + "file": "backupdr_v1_generated_backup_dr_list_backup_plans_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListBackupPlans_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_backup_plans_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": 
"google.cloud.backupdr_v1.BackupDRClient.list_backup_plans", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackupPlans", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListBackupPlans" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListBackupPlansRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlansPager", + "shortName": "list_backup_plans" + }, + "description": "Sample for ListBackupPlans", + "file": "backupdr_v1_generated_backup_dr_list_backup_plans_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListBackupPlans_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_backup_plans_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_backup_vaults", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackupVaults", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListBackupVaults" + }, + "parameters": [ + { + "name": 
"request", + "type": "google.cloud.backupdr_v1.types.ListBackupVaultsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupVaultsAsyncPager", + "shortName": "list_backup_vaults" + }, + "description": "Sample for ListBackupVaults", + "file": "backupdr_v1_generated_backup_dr_list_backup_vaults_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListBackupVaults_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_backup_vaults_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.list_backup_vaults", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackupVaults", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListBackupVaults" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListBackupVaultsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupVaultsPager", + "shortName": "list_backup_vaults" + }, + "description": "Sample for ListBackupVaults", + "file": "backupdr_v1_generated_backup_dr_list_backup_vaults_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListBackupVaults_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_backup_vaults_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_backups", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackups", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListBackups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListBackupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupsAsyncPager", + "shortName": "list_backups" + }, + "description": "Sample for ListBackups", + "file": "backupdr_v1_generated_backup_dr_list_backups_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"backupdr_v1_generated_BackupDR_ListBackups_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_backups_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.list_backups", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackups", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListBackups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListBackupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupsPager", + "shortName": "list_backups" + }, + "description": "Sample for ListBackups", + "file": "backupdr_v1_generated_backup_dr_list_backups_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListBackups_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + 
"end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_backups_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_data_sources", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListDataSources", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListDataSources" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListDataSourcesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListDataSourcesAsyncPager", + "shortName": "list_data_sources" + }, + "description": "Sample for ListDataSources", + "file": "backupdr_v1_generated_backup_dr_list_data_sources_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListDataSources_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_data_sources_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": 
"google.cloud.backupdr_v1.BackupDRClient.list_data_sources", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListDataSources", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListDataSources" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListDataSourcesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListDataSourcesPager", + "shortName": "list_data_sources" + }, + "description": "Sample for ListDataSources", + "file": "backupdr_v1_generated_backup_dr_list_data_sources_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListDataSources_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_data_sources_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_management_servers", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListManagementServers", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListManagementServers" + }, + "parameters": [ + { + 
"name": "request", + "type": "google.cloud.backupdr_v1.types.ListManagementServersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListManagementServersAsyncPager", + "shortName": "list_management_servers" + }, + "description": "Sample for ListManagementServers", + "file": "backupdr_v1_generated_backup_dr_list_management_servers_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListManagementServers_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_management_servers_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.list_management_servers", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListManagementServers", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListManagementServers" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListManagementServersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + 
"type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListManagementServersPager", + "shortName": "list_management_servers" + }, + "description": "Sample for ListManagementServers", + "file": "backupdr_v1_generated_backup_dr_list_management_servers_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListManagementServers_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_management_servers_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.restore_backup", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.RestoreBackup", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "RestoreBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.RestoreBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "restore_backup" + }, + "description": "Sample for RestoreBackup", + "file": "backupdr_v1_generated_backup_dr_restore_backup_async.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_RestoreBackup_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_restore_backup_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.restore_backup", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.RestoreBackup", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "CreateManagementServer" + "shortName": "RestoreBackup" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.CreateManagementServerRequest" + "type": "google.cloud.backupdr_v1.types.RestoreBackupRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { - "name": "management_server", - "type": "google.cloud.backupdr_v1.types.ManagementServer" + "name": "retry", + "type": "google.api_core.retry.Retry" }, { - "name": "management_server_id", + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "restore_backup" + }, + "description": "Sample for RestoreBackup", + "file": "backupdr_v1_generated_backup_dr_restore_backup_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_RestoreBackup_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 
27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_restore_backup_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.trigger_backup", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.TriggerBackup", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "TriggerBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.TriggerBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "rule_id", "type": "str" }, { @@ -59,13 +3822,13 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_management_server" + "shortName": "trigger_backup" }, - "description": "Sample for CreateManagementServer", - "file": "backupdr_v1_generated_backup_dr_create_management_server_async.py", + "description": "Sample for TriggerBackup", + "file": "backupdr_v1_generated_backup_dr_trigger_backup_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_CreateManagementServer_async", + "regionTag": "backupdr_v1_generated_BackupDR_TriggerBackup_async", "segments": [ { "end": 56, @@ -98,7 +3861,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_create_management_server_async.py" + "title": "backupdr_v1_generated_backup_dr_trigger_backup_async.py" }, { "canonical": true, @@ -107,30 +3870,26 @@ "fullName": "google.cloud.backupdr_v1.BackupDRClient", 
"shortName": "BackupDRClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.create_management_server", + "fullName": "google.cloud.backupdr_v1.BackupDRClient.trigger_backup", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.CreateManagementServer", + "fullName": "google.cloud.backupdr.v1.BackupDR.TriggerBackup", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "CreateManagementServer" + "shortName": "TriggerBackup" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.CreateManagementServerRequest" + "type": "google.cloud.backupdr_v1.types.TriggerBackupRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { - "name": "management_server", - "type": "google.cloud.backupdr_v1.types.ManagementServer" - }, - { - "name": "management_server_id", + "name": "rule_id", "type": "str" }, { @@ -147,13 +3906,13 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "create_management_server" + "shortName": "trigger_backup" }, - "description": "Sample for CreateManagementServer", - "file": "backupdr_v1_generated_backup_dr_create_management_server_sync.py", + "description": "Sample for TriggerBackup", + "file": "backupdr_v1_generated_backup_dr_trigger_backup_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_CreateManagementServer_sync", + "regionTag": "backupdr_v1_generated_BackupDR_TriggerBackup_sync", "segments": [ { "end": 56, @@ -186,7 +3945,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_create_management_server_sync.py" + "title": "backupdr_v1_generated_backup_dr_trigger_backup_sync.py" }, { "canonical": true, @@ -196,23 +3955,27 @@ "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", "shortName": "BackupDRAsyncClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.delete_management_server", + "fullName": 
"google.cloud.backupdr_v1.BackupDRAsyncClient.update_backup_vault", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteManagementServer", + "fullName": "google.cloud.backupdr.v1.BackupDR.UpdateBackupVault", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "DeleteManagementServer" + "shortName": "UpdateBackupVault" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.DeleteManagementServerRequest" + "type": "google.cloud.backupdr_v1.types.UpdateBackupVaultRequest" }, { - "name": "name", - "type": "str" + "name": "backup_vault", + "type": "google.cloud.backupdr_v1.types.BackupVault" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -228,21 +3991,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_management_server" + "shortName": "update_backup_vault" }, - "description": "Sample for DeleteManagementServer", - "file": "backupdr_v1_generated_backup_dr_delete_management_server_async.py", + "description": "Sample for UpdateBackupVault", + "file": "backupdr_v1_generated_backup_dr_update_backup_vault_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_DeleteManagementServer_async", + "regionTag": "backupdr_v1_generated_BackupDR_UpdateBackupVault_async", "segments": [ { - "end": 55, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 54, "start": 27, "type": "SHORT" }, @@ -252,22 +4015,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_delete_management_server_async.py" + "title": 
"backupdr_v1_generated_backup_dr_update_backup_vault_async.py" }, { "canonical": true, @@ -276,23 +4039,27 @@ "fullName": "google.cloud.backupdr_v1.BackupDRClient", "shortName": "BackupDRClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.delete_management_server", + "fullName": "google.cloud.backupdr_v1.BackupDRClient.update_backup_vault", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteManagementServer", + "fullName": "google.cloud.backupdr.v1.BackupDR.UpdateBackupVault", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "DeleteManagementServer" + "shortName": "UpdateBackupVault" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.DeleteManagementServerRequest" + "type": "google.cloud.backupdr_v1.types.UpdateBackupVaultRequest" }, { - "name": "name", - "type": "str" + "name": "backup_vault", + "type": "google.cloud.backupdr_v1.types.BackupVault" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -308,21 +4075,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "delete_management_server" + "shortName": "update_backup_vault" }, - "description": "Sample for DeleteManagementServer", - "file": "backupdr_v1_generated_backup_dr_delete_management_server_sync.py", + "description": "Sample for UpdateBackupVault", + "file": "backupdr_v1_generated_backup_dr_update_backup_vault_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_DeleteManagementServer_sync", + "regionTag": "backupdr_v1_generated_BackupDR_UpdateBackupVault_sync", "segments": [ { - "end": 55, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 54, "start": 27, "type": "SHORT" }, @@ -332,22 +4099,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, 
+ "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_delete_management_server_sync.py" + "title": "backupdr_v1_generated_backup_dr_update_backup_vault_sync.py" }, { "canonical": true, @@ -357,23 +4124,27 @@ "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", "shortName": "BackupDRAsyncClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.get_management_server", + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.update_backup", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.GetManagementServer", + "fullName": "google.cloud.backupdr.v1.BackupDR.UpdateBackup", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "GetManagementServer" + "shortName": "UpdateBackup" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.GetManagementServerRequest" + "type": "google.cloud.backupdr_v1.types.UpdateBackupRequest" }, { - "name": "name", - "type": "str" + "name": "backup", + "type": "google.cloud.backupdr_v1.types.Backup" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -388,22 +4159,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.backupdr_v1.types.ManagementServer", - "shortName": "get_management_server" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_backup" }, - "description": "Sample for GetManagementServer", - "file": "backupdr_v1_generated_backup_dr_get_management_server_async.py", + "description": "Sample for UpdateBackup", + "file": "backupdr_v1_generated_backup_dr_update_backup_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_GetManagementServer_async", + "regionTag": 
"backupdr_v1_generated_BackupDR_UpdateBackup_async", "segments": [ { - "end": 51, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 54, "start": 27, "type": "SHORT" }, @@ -413,22 +4184,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_get_management_server_async.py" + "title": "backupdr_v1_generated_backup_dr_update_backup_async.py" }, { "canonical": true, @@ -437,23 +4208,27 @@ "fullName": "google.cloud.backupdr_v1.BackupDRClient", "shortName": "BackupDRClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.get_management_server", + "fullName": "google.cloud.backupdr_v1.BackupDRClient.update_backup", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.GetManagementServer", + "fullName": "google.cloud.backupdr.v1.BackupDR.UpdateBackup", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "GetManagementServer" + "shortName": "UpdateBackup" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.GetManagementServerRequest" + "type": "google.cloud.backupdr_v1.types.UpdateBackupRequest" }, { - "name": "name", - "type": "str" + "name": "backup", + "type": "google.cloud.backupdr_v1.types.Backup" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -468,22 +4243,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.backupdr_v1.types.ManagementServer", - "shortName": "get_management_server" + "resultType": "google.api_core.operation.Operation", + "shortName": "update_backup" }, - "description": "Sample for GetManagementServer", - "file": 
"backupdr_v1_generated_backup_dr_get_management_server_sync.py", + "description": "Sample for UpdateBackup", + "file": "backupdr_v1_generated_backup_dr_update_backup_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_GetManagementServer_sync", + "regionTag": "backupdr_v1_generated_BackupDR_UpdateBackup_sync", "segments": [ { - "end": 51, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 54, "start": 27, "type": "SHORT" }, @@ -493,22 +4268,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_get_management_server_sync.py" + "title": "backupdr_v1_generated_backup_dr_update_backup_sync.py" }, { "canonical": true, @@ -518,23 +4293,27 @@ "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", "shortName": "BackupDRAsyncClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_management_servers", + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.update_data_source", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.ListManagementServers", + "fullName": "google.cloud.backupdr.v1.BackupDR.UpdateDataSource", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "ListManagementServers" + "shortName": "UpdateDataSource" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.ListManagementServersRequest" + "type": "google.cloud.backupdr_v1.types.UpdateDataSourceRequest" }, { - "name": "parent", - "type": "str" + "name": "data_source", + "type": "google.cloud.backupdr_v1.types.DataSource" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ 
-549,22 +4328,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListManagementServersAsyncPager", - "shortName": "list_management_servers" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_data_source" }, - "description": "Sample for ListManagementServers", - "file": "backupdr_v1_generated_backup_dr_list_management_servers_async.py", + "description": "Sample for UpdateDataSource", + "file": "backupdr_v1_generated_backup_dr_update_data_source_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_ListManagementServers_async", + "regionTag": "backupdr_v1_generated_BackupDR_UpdateDataSource_async", "segments": [ { - "end": 52, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 54, "start": 27, "type": "SHORT" }, @@ -574,22 +4353,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_list_management_servers_async.py" + "title": "backupdr_v1_generated_backup_dr_update_data_source_async.py" }, { "canonical": true, @@ -598,23 +4377,27 @@ "fullName": "google.cloud.backupdr_v1.BackupDRClient", "shortName": "BackupDRClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.list_management_servers", + "fullName": "google.cloud.backupdr_v1.BackupDRClient.update_data_source", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.ListManagementServers", + "fullName": "google.cloud.backupdr.v1.BackupDR.UpdateDataSource", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "ListManagementServers" + "shortName": "UpdateDataSource" }, "parameters": [ { "name": 
"request", - "type": "google.cloud.backupdr_v1.types.ListManagementServersRequest" + "type": "google.cloud.backupdr_v1.types.UpdateDataSourceRequest" }, { - "name": "parent", - "type": "str" + "name": "data_source", + "type": "google.cloud.backupdr_v1.types.DataSource" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -629,22 +4412,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListManagementServersPager", - "shortName": "list_management_servers" + "resultType": "google.api_core.operation.Operation", + "shortName": "update_data_source" }, - "description": "Sample for ListManagementServers", - "file": "backupdr_v1_generated_backup_dr_list_management_servers_sync.py", + "description": "Sample for UpdateDataSource", + "file": "backupdr_v1_generated_backup_dr_update_data_source_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_ListManagementServers_sync", + "regionTag": "backupdr_v1_generated_BackupDR_UpdateDataSource_sync", "segments": [ { - "end": 52, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 54, "start": 27, "type": "SHORT" }, @@ -654,22 +4437,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_list_management_servers_sync.py" + "title": "backupdr_v1_generated_backup_dr_update_data_source_sync.py" } ] } diff --git a/packages/google-cloud-backupdr/scripts/fixup_backupdr_v1_keywords.py b/packages/google-cloud-backupdr/scripts/fixup_backupdr_v1_keywords.py index b65698148046..c0dd15568f46 100644 --- a/packages/google-cloud-backupdr/scripts/fixup_backupdr_v1_keywords.py +++ 
b/packages/google-cloud-backupdr/scripts/fixup_backupdr_v1_keywords.py @@ -39,10 +39,33 @@ def partition( class backupdrCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_backup_plan': ('parent', 'backup_plan_id', 'backup_plan', 'request_id', ), + 'create_backup_plan_association': ('parent', 'backup_plan_association_id', 'backup_plan_association', 'request_id', ), + 'create_backup_vault': ('parent', 'backup_vault_id', 'backup_vault', 'request_id', 'validate_only', ), 'create_management_server': ('parent', 'management_server_id', 'management_server', 'request_id', ), + 'delete_backup': ('name', 'request_id', ), + 'delete_backup_plan': ('name', 'request_id', ), + 'delete_backup_plan_association': ('name', 'request_id', ), + 'delete_backup_vault': ('name', 'request_id', 'force', 'etag', 'validate_only', 'allow_missing', ), 'delete_management_server': ('name', 'request_id', ), + 'fetch_usable_backup_vaults': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'get_backup': ('name', 'view', ), + 'get_backup_plan': ('name', ), + 'get_backup_plan_association': ('name', ), + 'get_backup_vault': ('name', 'view', ), + 'get_data_source': ('name', ), 'get_management_server': ('name', ), + 'list_backup_plan_associations': ('parent', 'page_size', 'page_token', 'filter', ), + 'list_backup_plans': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_backups': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'view', ), + 'list_backup_vaults': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'view', ), + 'list_data_sources': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_management_servers': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'restore_backup': ('name', 'request_id', 'compute_instance_target_environment', 'compute_instance_restore_properties', ), + 'trigger_backup': ('name', 
'rule_id', 'request_id', ), + 'update_backup': ('update_mask', 'backup', 'request_id', ), + 'update_backup_vault': ('update_mask', 'backup_vault', 'request_id', 'validate_only', 'force', ), + 'update_data_source': ('update_mask', 'data_source', 'request_id', 'allow_missing', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/packages/google-cloud-backupdr/tests/unit/gapic/backupdr_v1/test_backup_dr.py b/packages/google-cloud-backupdr/tests/unit/gapic/backupdr_v1/test_backup_dr.py index 1b1c7ad74e91..ee098d5a5646 100644 --- a/packages/google-cloud-backupdr/tests/unit/gapic/backupdr_v1/test_backup_dr.py +++ b/packages/google-cloud-backupdr/tests/unit/gapic/backupdr_v1/test_backup_dr.py @@ -48,10 +48,16 @@ from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import json_format from google.protobuf import timestamp_pb2 # type: ignore from google.protobuf import wrappers_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import month_pb2 # type: ignore import grpc from grpc.experimental import aio from proto.marshal.rules import wrappers @@ -66,7 +72,14 @@ pagers, transports, ) -from google.cloud.backupdr_v1.types import backupdr +from google.cloud.backupdr_v1.types import ( + backupdr, + backupplan, + backupplanassociation, + backupvault, + backupvault_ba, + backupvault_gce, +) def client_cert_source_callback(): @@ -2911,52 +2924,98 @@ async def test_delete_management_server_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - backupdr.ListManagementServersRequest, + 
backupvault.CreateBackupVaultRequest, dict, ], ) -def test_list_management_servers_rest(request_type): +def test_create_backup_vault(request_type, transport: str = "grpc"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = backupdr.ListManagementServersResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.CreateBackupVaultRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_backup_vault_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_backup_vault), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) + client.create_backup_vault() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.CreateBackupVaultRequest() - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = backupdr.ListManagementServersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_management_servers(request) +def test_create_backup_vault_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListManagementServersPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.CreateBackupVaultRequest( + parent="parent_value", + backup_vault_id="backup_vault_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_vault), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_backup_vault(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.CreateBackupVaultRequest( + parent="parent_value", + backup_vault_id="backup_vault_id_value", + ) -def test_list_management_servers_rest_use_cached_wrapped_rpc(): +def test_create_backup_vault_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -2965,8 +3024,7 @@ def test_list_management_servers_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_management_servers - in client._transport._wrapped_methods + client._transport.create_backup_vault in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -2975,128 +3033,18962 @@ def test_list_management_servers_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_management_servers + client._transport.create_backup_vault ] = mock_rpc - request = {} - client.list_management_servers(request) + client.create_backup_vault(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_management_servers(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_backup_vault(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_management_servers_rest_required_fields( - request_type=backupdr.ListManagementServersRequest, -): - transport_class = transports.BackupDRRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) +@pytest.mark.asyncio +async def test_create_backup_vault_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) - # verify fields with default values are dropped + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_backup_vault() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.CreateBackupVaultRequest() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_management_servers._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - # verify required fields with default values are now present +@pytest.mark.asyncio +async def test_create_backup_vault_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - jsonified_request["parent"] = "parent_value" + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_management_servers._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", + # Ensure method has been cached + assert ( + client._client._transport.create_backup_vault + in client._client._transport._wrapped_methods ) - ) - jsonified_request.update(unset_fields) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_backup_vault + ] = mock_rpc - client = BackupDRClient( + request = {} + await client.create_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_backup_vault(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_backup_vault_async( + transport: str = "grpc_asyncio", request_type=backupvault.CreateBackupVaultRequest +): + client = BackupDRAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = backupdr.ListManagementServersResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response_value = Response() - response_value.status_code = 200 + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_backup_vault(request) - # Convert return value to protobuf type - return_value = backupdr.ListManagementServersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.CreateBackupVaultRequest() + assert args[0] == request - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) - response = client.list_management_servers(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params +@pytest.mark.asyncio +async def test_create_backup_vault_async_from_dict(): + await test_create_backup_vault_async(request_type=dict) -def test_list_management_servers_rest_unset_required_fields(): - transport = transports.BackupDRRestTransport( - credentials=ga_credentials.AnonymousCredentials +def test_create_backup_vault_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), ) - unset_fields = transport.list_management_servers._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.CreateBackupVaultRequest() + request.parent = "parent_value" -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_management_servers_rest_interceptors(null_interceptor): - transport = transports.BackupDRRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_vault), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_backup_vault_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.CreateBackupVaultRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_vault), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_backup_vault_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_backup_vault( + parent="parent_value", + backup_vault=backupvault.BackupVault(name="name_value"), + backup_vault_id="backup_vault_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].backup_vault + mock_val = backupvault.BackupVault(name="name_value") + assert arg == mock_val + arg = args[0].backup_vault_id + mock_val = "backup_vault_id_value" + assert arg == mock_val + + +def test_create_backup_vault_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_backup_vault( + backupvault.CreateBackupVaultRequest(), + parent="parent_value", + backup_vault=backupvault.BackupVault(name="name_value"), + backup_vault_id="backup_vault_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_backup_vault_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_backup_vault( + parent="parent_value", + backup_vault=backupvault.BackupVault(name="name_value"), + backup_vault_id="backup_vault_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].backup_vault + mock_val = backupvault.BackupVault(name="name_value") + assert arg == mock_val + arg = args[0].backup_vault_id + mock_val = "backup_vault_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_backup_vault_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_backup_vault( + backupvault.CreateBackupVaultRequest(), + parent="parent_value", + backup_vault=backupvault.BackupVault(name="name_value"), + backup_vault_id="backup_vault_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.ListBackupVaultsRequest, + dict, + ], +) +def test_list_backup_vaults(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.ListBackupVaultsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.ListBackupVaultsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupVaultsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_backup_vaults_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_backup_vaults() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.ListBackupVaultsRequest() + + +def test_list_backup_vaults_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.ListBackupVaultsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_backup_vaults(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.ListBackupVaultsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_backup_vaults_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_backup_vaults in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_backup_vaults + ] = mock_rpc + request = {} + client.list_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_backup_vaults(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_backup_vaults_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListBackupVaultsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_backup_vaults() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.ListBackupVaultsRequest() + + +@pytest.mark.asyncio +async def test_list_backup_vaults_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_backup_vaults + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_backup_vaults + ] = mock_rpc + + request = {} + await client.list_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.list_backup_vaults(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_backup_vaults_async( + transport: str = "grpc_asyncio", request_type=backupvault.ListBackupVaultsRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListBackupVaultsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.ListBackupVaultsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupVaultsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_backup_vaults_async_from_dict(): + await test_list_backup_vaults_async(request_type=dict) + + +def test_list_backup_vaults_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = backupvault.ListBackupVaultsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + call.return_value = backupvault.ListBackupVaultsResponse() + client.list_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_backup_vaults_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.ListBackupVaultsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListBackupVaultsResponse() + ) + await client.list_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_backup_vaults_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.ListBackupVaultsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_backup_vaults( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_backup_vaults_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_backup_vaults( + backupvault.ListBackupVaultsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_backup_vaults_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.ListBackupVaultsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListBackupVaultsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_backup_vaults( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_backup_vaults_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_backup_vaults( + backupvault.ListBackupVaultsRequest(), + parent="parent_value", + ) + + +def test_list_backup_vaults_pager(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + next_page_token="abc", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[], + next_page_token="def", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + ], + next_page_token="ghi", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_backup_vaults(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupvault.BackupVault) for i in 
results) + + +def test_list_backup_vaults_pages(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + next_page_token="abc", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[], + next_page_token="def", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + ], + next_page_token="ghi", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + ), + RuntimeError, + ) + pages = list(client.list_backup_vaults(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_backup_vaults_async_pager(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + next_page_token="abc", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[], + next_page_token="def", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + ], + next_page_token="ghi", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_backup_vaults( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, backupvault.BackupVault) for i in responses) + + +@pytest.mark.asyncio +async def test_list_backup_vaults_async_pages(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + next_page_token="abc", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[], + next_page_token="def", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + ], + next_page_token="ghi", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_backup_vaults(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.FetchUsableBackupVaultsRequest, + dict, + ], +) +def test_fetch_usable_backup_vaults(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.FetchUsableBackupVaultsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.fetch_usable_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.FetchUsableBackupVaultsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.FetchUsableBackupVaultsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_fetch_usable_backup_vaults_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.fetch_usable_backup_vaults() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.FetchUsableBackupVaultsRequest() + + +def test_fetch_usable_backup_vaults_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.FetchUsableBackupVaultsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.fetch_usable_backup_vaults(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.FetchUsableBackupVaultsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_fetch_usable_backup_vaults_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.fetch_usable_backup_vaults + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.fetch_usable_backup_vaults + ] = mock_rpc + request = {} + client.fetch_usable_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.fetch_usable_backup_vaults(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_fetch_usable_backup_vaults_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.FetchUsableBackupVaultsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.fetch_usable_backup_vaults() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.FetchUsableBackupVaultsRequest() + + +@pytest.mark.asyncio +async def test_fetch_usable_backup_vaults_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.fetch_usable_backup_vaults + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.fetch_usable_backup_vaults + ] = mock_rpc + + request = {} + await client.fetch_usable_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.fetch_usable_backup_vaults(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_fetch_usable_backup_vaults_async( + transport: str = "grpc_asyncio", + request_type=backupvault.FetchUsableBackupVaultsRequest, +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.FetchUsableBackupVaultsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.fetch_usable_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.FetchUsableBackupVaultsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.FetchUsableBackupVaultsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_fetch_usable_backup_vaults_async_from_dict(): + await test_fetch_usable_backup_vaults_async(request_type=dict) + + +def test_fetch_usable_backup_vaults_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.FetchUsableBackupVaultsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), "__call__" + ) as call: + call.return_value = backupvault.FetchUsableBackupVaultsResponse() + client.fetch_usable_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_fetch_usable_backup_vaults_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.FetchUsableBackupVaultsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.FetchUsableBackupVaultsResponse() + ) + await client.fetch_usable_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_fetch_usable_backup_vaults_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.FetchUsableBackupVaultsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.fetch_usable_backup_vaults( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_fetch_usable_backup_vaults_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.fetch_usable_backup_vaults( + backupvault.FetchUsableBackupVaultsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_fetch_usable_backup_vaults_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.FetchUsableBackupVaultsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.FetchUsableBackupVaultsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.fetch_usable_backup_vaults( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_fetch_usable_backup_vaults_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.fetch_usable_backup_vaults( + backupvault.FetchUsableBackupVaultsRequest(), + parent="parent_value", + ) + + +def test_fetch_usable_backup_vaults_pager(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + next_page_token="abc", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[], + next_page_token="def", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + ], + next_page_token="ghi", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.fetch_usable_backup_vaults( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupvault.BackupVault) for i in results) + + +def test_fetch_usable_backup_vaults_pages(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + next_page_token="abc", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[], + next_page_token="def", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + ], + next_page_token="ghi", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + ), + RuntimeError, + ) + pages = list(client.fetch_usable_backup_vaults(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_fetch_usable_backup_vaults_async_pager(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + next_page_token="abc", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[], + next_page_token="def", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + ], + next_page_token="ghi", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + ), + RuntimeError, + ) + async_pager = await client.fetch_usable_backup_vaults( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, backupvault.BackupVault) for i in responses) + + +@pytest.mark.asyncio +async def test_fetch_usable_backup_vaults_async_pages(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + next_page_token="abc", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[], + next_page_token="def", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + ], + next_page_token="ghi", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.fetch_usable_backup_vaults(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.GetBackupVaultRequest, + dict, + ], +) +def test_get_backup_vault(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = backupvault.BackupVault( + name="name_value", + description="description_value", + deletable=True, + etag="etag_value", + state=backupvault.BackupVault.State.CREATING, + backup_count=1278, + service_account="service_account_value", + total_stored_bytes=1946, + uid="uid_value", + access_restriction=backupvault.BackupVault.AccessRestriction.WITHIN_PROJECT, + ) + response = client.get_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.GetBackupVaultRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, backupvault.BackupVault) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.deletable is True + assert response.etag == "etag_value" + assert response.state == backupvault.BackupVault.State.CREATING + assert response.backup_count == 1278 + assert response.service_account == "service_account_value" + assert response.total_stored_bytes == 1946 + assert response.uid == "uid_value" + assert ( + response.access_restriction + == backupvault.BackupVault.AccessRestriction.WITHIN_PROJECT + ) + + +def test_get_backup_vault_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_backup_vault() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.GetBackupVaultRequest() + + +def test_get_backup_vault_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.GetBackupVaultRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_backup_vault(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.GetBackupVaultRequest( + name="name_value", + ) + + +def test_get_backup_vault_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_backup_vault in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.get_backup_vault + ] = mock_rpc + request = {} + client.get_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_backup_vault(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_backup_vault_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.BackupVault( + name="name_value", + description="description_value", + deletable=True, + etag="etag_value", + state=backupvault.BackupVault.State.CREATING, + backup_count=1278, + service_account="service_account_value", + total_stored_bytes=1946, + uid="uid_value", + access_restriction=backupvault.BackupVault.AccessRestriction.WITHIN_PROJECT, + ) + ) + response = await client.get_backup_vault() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.GetBackupVaultRequest() + + +@pytest.mark.asyncio +async def test_get_backup_vault_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) 
+ + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_backup_vault + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_backup_vault + ] = mock_rpc + + request = {} + await client.get_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_backup_vault(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_backup_vault_async( + transport: str = "grpc_asyncio", request_type=backupvault.GetBackupVaultRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.BackupVault( + name="name_value", + description="description_value", + deletable=True, + etag="etag_value", + state=backupvault.BackupVault.State.CREATING, + backup_count=1278, + service_account="service_account_value", + total_stored_bytes=1946, + uid="uid_value", + access_restriction=backupvault.BackupVault.AccessRestriction.WITHIN_PROJECT, + ) + ) + response = await client.get_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.GetBackupVaultRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, backupvault.BackupVault) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.deletable is True + assert response.etag == "etag_value" + assert response.state == backupvault.BackupVault.State.CREATING + assert response.backup_count == 1278 + assert response.service_account == "service_account_value" + assert response.total_stored_bytes == 1946 + assert response.uid == "uid_value" + assert ( + response.access_restriction + == backupvault.BackupVault.AccessRestriction.WITHIN_PROJECT + ) + + +@pytest.mark.asyncio +async def test_get_backup_vault_async_from_dict(): + await test_get_backup_vault_async(request_type=dict) + + +def test_get_backup_vault_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.GetBackupVaultRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: + call.return_value = backupvault.BackupVault() + client.get_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_backup_vault_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.GetBackupVaultRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.BackupVault() + ) + await client.get_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_backup_vault_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.BackupVault() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.get_backup_vault( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_backup_vault_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_backup_vault( + backupvault.GetBackupVaultRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_backup_vault_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.BackupVault() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.BackupVault() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_backup_vault( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_backup_vault_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_backup_vault( + backupvault.GetBackupVaultRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.UpdateBackupVaultRequest, + dict, + ], +) +def test_update_backup_vault(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.UpdateBackupVaultRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_backup_vault_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_vault), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_backup_vault() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.UpdateBackupVaultRequest() + + +def test_update_backup_vault_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.UpdateBackupVaultRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_vault), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_backup_vault(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.UpdateBackupVaultRequest() + + +def test_update_backup_vault_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_backup_vault in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.update_backup_vault + ] = mock_rpc + request = {} + client.update_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_backup_vault(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_backup_vault_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_backup_vault() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.UpdateBackupVaultRequest() + + +@pytest.mark.asyncio +async def test_update_backup_vault_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_backup_vault + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_backup_vault + ] = mock_rpc + + request = {} + await client.update_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_backup_vault(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_backup_vault_async( + transport: str = "grpc_asyncio", request_type=backupvault.UpdateBackupVaultRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.UpdateBackupVaultRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_backup_vault_async_from_dict(): + await test_update_backup_vault_async(request_type=dict) + + +def test_update_backup_vault_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.UpdateBackupVaultRequest() + + request.backup_vault.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_backup_vault), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "backup_vault.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_backup_vault_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.UpdateBackupVaultRequest() + + request.backup_vault.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_vault), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "backup_vault.name=name_value", + ) in kw["metadata"] + + +def test_update_backup_vault_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_backup_vault( + backup_vault=backupvault.BackupVault(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].backup_vault + mock_val = backupvault.BackupVault(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_backup_vault_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_backup_vault( + backupvault.UpdateBackupVaultRequest(), + backup_vault=backupvault.BackupVault(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_backup_vault_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.update_backup_vault( + backup_vault=backupvault.BackupVault(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].backup_vault + mock_val = backupvault.BackupVault(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_backup_vault_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_backup_vault( + backupvault.UpdateBackupVaultRequest(), + backup_vault=backupvault.BackupVault(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.DeleteBackupVaultRequest, + dict, + ], +) +def test_delete_backup_vault(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.DeleteBackupVaultRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_backup_vault_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_vault), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_backup_vault() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.DeleteBackupVaultRequest() + + +def test_delete_backup_vault_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.DeleteBackupVaultRequest( + name="name_value", + etag="etag_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_vault), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_backup_vault(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.DeleteBackupVaultRequest( + name="name_value", + etag="etag_value", + ) + + +def test_delete_backup_vault_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_backup_vault in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_backup_vault + ] = mock_rpc + request = {} + client.delete_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_backup_vault(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_backup_vault_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_backup_vault() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.DeleteBackupVaultRequest() + + +@pytest.mark.asyncio +async def test_delete_backup_vault_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_backup_vault + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_backup_vault + ] = mock_rpc + + request = {} + await client.delete_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_backup_vault(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_backup_vault_async( + transport: str = "grpc_asyncio", request_type=backupvault.DeleteBackupVaultRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.DeleteBackupVaultRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_backup_vault_async_from_dict(): + await test_delete_backup_vault_async(request_type=dict) + + +def test_delete_backup_vault_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.DeleteBackupVaultRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_backup_vault), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_backup_vault_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.DeleteBackupVaultRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_vault), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_backup_vault_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_backup_vault( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_backup_vault_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_backup_vault( + backupvault.DeleteBackupVaultRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_backup_vault_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_backup_vault( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_backup_vault_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_backup_vault( + backupvault.DeleteBackupVaultRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.ListDataSourcesRequest, + dict, + ], +) +def test_list_data_sources(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.ListDataSourcesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_data_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.ListDataSourcesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDataSourcesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_data_sources_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_data_sources() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.ListDataSourcesRequest() + + +def test_list_data_sources_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.ListDataSourcesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_data_sources(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.ListDataSourcesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_data_sources_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_data_sources in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_data_sources + ] = mock_rpc + request = {} + client.list_data_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_data_sources(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_data_sources_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListDataSourcesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_data_sources() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.ListDataSourcesRequest() + + +@pytest.mark.asyncio +async def test_list_data_sources_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_data_sources + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_data_sources + ] = mock_rpc + + request = {} + await client.list_data_sources(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.list_data_sources(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_data_sources_async( + transport: str = "grpc_asyncio", request_type=backupvault.ListDataSourcesRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListDataSourcesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_data_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.ListDataSourcesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDataSourcesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_data_sources_async_from_dict(): + await test_list_data_sources_async(request_type=dict) + + +def test_list_data_sources_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = backupvault.ListDataSourcesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + call.return_value = backupvault.ListDataSourcesResponse() + client.list_data_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_data_sources_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.ListDataSourcesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListDataSourcesResponse() + ) + await client.list_data_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_data_sources_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.ListDataSourcesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_data_sources( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_data_sources_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_data_sources( + backupvault.ListDataSourcesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_data_sources_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.ListDataSourcesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListDataSourcesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_data_sources( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_data_sources_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_data_sources( + backupvault.ListDataSourcesRequest(), + parent="parent_value", + ) + + +def test_list_data_sources_pager(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + backupvault.DataSource(), + backupvault.DataSource(), + ], + next_page_token="abc", + ), + backupvault.ListDataSourcesResponse( + data_sources=[], + next_page_token="def", + ), + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + ], + next_page_token="ghi", + ), + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + backupvault.DataSource(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_data_sources(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupvault.DataSource) for i in results) + + +def 
test_list_data_sources_pages(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + backupvault.DataSource(), + backupvault.DataSource(), + ], + next_page_token="abc", + ), + backupvault.ListDataSourcesResponse( + data_sources=[], + next_page_token="def", + ), + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + ], + next_page_token="ghi", + ), + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + backupvault.DataSource(), + ], + ), + RuntimeError, + ) + pages = list(client.list_data_sources(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_data_sources_async_pager(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + backupvault.DataSource(), + backupvault.DataSource(), + ], + next_page_token="abc", + ), + backupvault.ListDataSourcesResponse( + data_sources=[], + next_page_token="def", + ), + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + ], + next_page_token="ghi", + ), + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + backupvault.DataSource(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_data_sources( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, backupvault.DataSource) for i in responses) + + +@pytest.mark.asyncio +async def test_list_data_sources_async_pages(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + backupvault.DataSource(), + backupvault.DataSource(), + ], + next_page_token="abc", + ), + backupvault.ListDataSourcesResponse( + data_sources=[], + next_page_token="def", + ), + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + ], + next_page_token="ghi", + ), + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + backupvault.DataSource(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_data_sources(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.GetDataSourceRequest, + dict, + ], +) +def test_get_data_source(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.DataSource( + name="name_value", + state=backupvault.DataSource.State.CREATING, + backup_count=1278, + etag="etag_value", + total_stored_bytes=1946, + config_state=backupvault.BackupConfigState.ACTIVE, + ) + response = client.get_data_source(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.GetDataSourceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, backupvault.DataSource) + assert response.name == "name_value" + assert response.state == backupvault.DataSource.State.CREATING + assert response.backup_count == 1278 + assert response.etag == "etag_value" + assert response.total_stored_bytes == 1946 + assert response.config_state == backupvault.BackupConfigState.ACTIVE + + +def test_get_data_source_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_data_source() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.GetDataSourceRequest() + + +def test_get_data_source_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.GetDataSourceRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_data_source(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.GetDataSourceRequest( + name="name_value", + ) + + +def test_get_data_source_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_data_source in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_data_source] = mock_rpc + request = {} + client.get_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_data_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_data_source_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.DataSource( + name="name_value", + state=backupvault.DataSource.State.CREATING, + backup_count=1278, + etag="etag_value", + total_stored_bytes=1946, + config_state=backupvault.BackupConfigState.ACTIVE, + ) + ) + response = await client.get_data_source() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.GetDataSourceRequest() + + +@pytest.mark.asyncio +async def test_get_data_source_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_data_source + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_data_source + ] = mock_rpc + + request = {} + await client.get_data_source(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.get_data_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_data_source_async( + transport: str = "grpc_asyncio", request_type=backupvault.GetDataSourceRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.DataSource( + name="name_value", + state=backupvault.DataSource.State.CREATING, + backup_count=1278, + etag="etag_value", + total_stored_bytes=1946, + config_state=backupvault.BackupConfigState.ACTIVE, + ) + ) + response = await client.get_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.GetDataSourceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, backupvault.DataSource) + assert response.name == "name_value" + assert response.state == backupvault.DataSource.State.CREATING + assert response.backup_count == 1278 + assert response.etag == "etag_value" + assert response.total_stored_bytes == 1946 + assert response.config_state == backupvault.BackupConfigState.ACTIVE + + +@pytest.mark.asyncio +async def test_get_data_source_async_from_dict(): + await test_get_data_source_async(request_type=dict) + + +def test_get_data_source_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.GetDataSourceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + call.return_value = backupvault.DataSource() + client.get_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_data_source_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.GetDataSourceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.DataSource() + ) + await client.get_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_data_source_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.DataSource() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_data_source( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_data_source_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_data_source( + backupvault.GetDataSourceRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_data_source_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.DataSource() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.DataSource() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_data_source( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_data_source_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_data_source( + backupvault.GetDataSourceRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.UpdateDataSourceRequest, + dict, + ], +) +def test_update_data_source(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_data_source(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.UpdateDataSourceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_data_source_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_data_source() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.UpdateDataSourceRequest() + + +def test_update_data_source_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.UpdateDataSourceRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_data_source(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.UpdateDataSourceRequest() + + +def test_update_data_source_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_data_source in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_data_source + ] = mock_rpc + request = {} + client.update_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_data_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_data_source_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_data_source() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.UpdateDataSourceRequest() + + +@pytest.mark.asyncio +async def test_update_data_source_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_data_source + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_data_source + ] = mock_rpc + + request = {} + await client.update_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_data_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_data_source_async( + transport: str = "grpc_asyncio", request_type=backupvault.UpdateDataSourceRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.UpdateDataSourceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_data_source_async_from_dict(): + await test_update_data_source_async(request_type=dict) + + +def test_update_data_source_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.UpdateDataSourceRequest() + + request.data_source.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "data_source.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_data_source_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.UpdateDataSourceRequest() + + request.data_source.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "data_source.name=name_value", + ) in kw["metadata"] + + +def test_update_data_source_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_data_source( + data_source=backupvault.DataSource(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].data_source + mock_val = backupvault.DataSource(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_data_source_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_data_source( + backupvault.UpdateDataSourceRequest(), + data_source=backupvault.DataSource(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_data_source_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.update_data_source( + data_source=backupvault.DataSource(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].data_source + mock_val = backupvault.DataSource(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_data_source_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_data_source( + backupvault.UpdateDataSourceRequest(), + data_source=backupvault.DataSource(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.ListBackupsRequest, + dict, + ], +) +def test_list_backups(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.ListBackupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.ListBackupsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_backups_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_backups() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.ListBackupsRequest() + + +def test_list_backups_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.ListBackupsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_backups(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.ListBackupsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_backups_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_backups in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_backups] = mock_rpc + request = {} + client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_backups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_backups_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListBackupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_backups() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.ListBackupsRequest() + + +@pytest.mark.asyncio +async def test_list_backups_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_backups + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_backups + ] = mock_rpc + + request = {} + await client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_backups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_backups_async( + transport: str = "grpc_asyncio", request_type=backupvault.ListBackupsRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListBackupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.ListBackupsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_backups_async_from_dict(): + await test_list_backups_async(request_type=dict) + + +def test_list_backups_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.ListBackupsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + call.return_value = backupvault.ListBackupsResponse() + client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_backups_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.ListBackupsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListBackupsResponse() + ) + await client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_backups_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.ListBackupsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_backups( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_backups_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_backups( + backupvault.ListBackupsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_backups_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.ListBackupsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListBackupsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_backups( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_backups_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_backups( + backupvault.ListBackupsRequest(), + parent="parent_value", + ) + + +def test_list_backups_pager(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + backupvault.Backup(), + ], + next_page_token="abc", + ), + backupvault.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + ], + next_page_token="ghi", + ), + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_backups(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupvault.Backup) for i in results) + + +def test_list_backups_pages(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + backupvault.Backup(), + ], + next_page_token="abc", + ), + backupvault.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + ], + next_page_token="ghi", + ), + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + ], + ), + RuntimeError, + ) + pages = list(client.list_backups(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_backups_async_pager(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backups), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + backupvault.Backup(), + ], + next_page_token="abc", + ), + backupvault.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + ], + next_page_token="ghi", + ), + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_backups( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, backupvault.Backup) for i in responses) + + +@pytest.mark.asyncio +async def test_list_backups_async_pages(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backups), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + backupvault.Backup(), + ], + next_page_token="abc", + ), + backupvault.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + ], + next_page_token="ghi", + ), + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_backups(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.GetBackupRequest, + dict, + ], +) +def test_get_backup(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.Backup( + name="name_value", + description="description_value", + etag="etag_value", + state=backupvault.Backup.State.CREATING, + backup_type=backupvault.Backup.BackupType.SCHEDULED, + resource_size_bytes=2056, + ) + response = client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.GetBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, backupvault.Backup) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.etag == "etag_value" + assert response.state == backupvault.Backup.State.CREATING + assert response.backup_type == backupvault.Backup.BackupType.SCHEDULED + assert response.resource_size_bytes == 2056 + + +def test_get_backup_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.GetBackupRequest() + + +def test_get_backup_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.GetBackupRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_backup(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.GetBackupRequest( + name="name_value", + ) + + +def test_get_backup_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_backup] = mock_rpc + request = {} + client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_backup_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.Backup( + name="name_value", + description="description_value", + etag="etag_value", + state=backupvault.Backup.State.CREATING, + backup_type=backupvault.Backup.BackupType.SCHEDULED, + resource_size_bytes=2056, + ) + ) + response = await client.get_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.GetBackupRequest() + + +@pytest.mark.asyncio +async def test_get_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_backup + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_backup + ] = mock_rpc + + request = {} + await client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.get_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_backup_async( + transport: str = "grpc_asyncio", request_type=backupvault.GetBackupRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.Backup( + name="name_value", + description="description_value", + etag="etag_value", + state=backupvault.Backup.State.CREATING, + backup_type=backupvault.Backup.BackupType.SCHEDULED, + resource_size_bytes=2056, + ) + ) + response = await client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.GetBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, backupvault.Backup) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.etag == "etag_value" + assert response.state == backupvault.Backup.State.CREATING + assert response.backup_type == backupvault.Backup.BackupType.SCHEDULED + assert response.resource_size_bytes == 2056 + + +@pytest.mark.asyncio +async def test_get_backup_async_from_dict(): + await test_get_backup_async(request_type=dict) + + +def test_get_backup_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.GetBackupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + call.return_value = backupvault.Backup() + client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_backup_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.GetBackupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupvault.Backup()) + await client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_backup_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.Backup() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_backup( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_backup_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_backup( + backupvault.GetBackupRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_backup_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = backupvault.Backup() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupvault.Backup()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_backup( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_backup_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_backup( + backupvault.GetBackupRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.UpdateBackupRequest, + dict, + ], +) +def test_update_backup(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.UpdateBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_update_backup_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.UpdateBackupRequest() + + +def test_update_backup_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.UpdateBackupRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_backup(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.UpdateBackupRequest() + + +def test_update_backup_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_backup] = mock_rpc + request = {} + client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_backup_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.UpdateBackupRequest() + + +@pytest.mark.asyncio +async def test_update_backup_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_backup + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_backup + ] = mock_rpc + + request = {} + await client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_backup_async( + transport: str = "grpc_asyncio", request_type=backupvault.UpdateBackupRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.UpdateBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_backup_async_from_dict(): + await test_update_backup_async(request_type=dict) + + +def test_update_backup_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.UpdateBackupRequest() + + request.backup.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "backup.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_backup_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.UpdateBackupRequest() + + request.backup.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "backup.name=name_value", + ) in kw["metadata"] + + +def test_update_backup_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_backup( + backup=backupvault.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].backup + mock_val = backupvault.Backup(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_backup_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_backup( + backupvault.UpdateBackupRequest(), + backup=backupvault.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_backup_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.update_backup( + backup=backupvault.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].backup + mock_val = backupvault.Backup(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_backup_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_backup( + backupvault.UpdateBackupRequest(), + backup=backupvault.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.DeleteBackupRequest, + dict, + ], +) +def test_delete_backup(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.DeleteBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_backup_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.DeleteBackupRequest() + + +def test_delete_backup_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.DeleteBackupRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_backup(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.DeleteBackupRequest( + name="name_value", + ) + + +def test_delete_backup_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_backup] = mock_rpc + request = {} + client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_backup_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.DeleteBackupRequest() + + +@pytest.mark.asyncio +async def test_delete_backup_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_backup + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_backup + ] = mock_rpc + + request = {} + await client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_backup_async( + transport: str = "grpc_asyncio", request_type=backupvault.DeleteBackupRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.DeleteBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_backup_async_from_dict(): + await test_delete_backup_async(request_type=dict) + + +def test_delete_backup_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.DeleteBackupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_backup_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.DeleteBackupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_backup_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_backup( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_backup_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_backup( + backupvault.DeleteBackupRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_backup_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_backup( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_backup_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_backup( + backupvault.DeleteBackupRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.RestoreBackupRequest, + dict, + ], +) +def test_restore_backup(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.restore_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.RestoreBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_restore_backup_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.restore_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.RestoreBackupRequest() + + +def test_restore_backup_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.RestoreBackupRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.restore_backup(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.RestoreBackupRequest( + name="name_value", + ) + + +def test_restore_backup_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.restore_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.restore_backup] = mock_rpc + request = {} + client.restore_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.restore_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_restore_backup_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.restore_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.RestoreBackupRequest() + + +@pytest.mark.asyncio +async def test_restore_backup_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.restore_backup + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.restore_backup + ] = mock_rpc + + request = {} + await client.restore_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.restore_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_restore_backup_async( + transport: str = "grpc_asyncio", request_type=backupvault.RestoreBackupRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.restore_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupvault.RestoreBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_restore_backup_async_from_dict(): + await test_restore_backup_async(request_type=dict) + + +def test_restore_backup_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.RestoreBackupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.restore_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_restore_backup_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupvault.RestoreBackupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.restore_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_restore_backup_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.restore_backup( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_restore_backup_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.restore_backup( + backupvault.RestoreBackupRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_restore_backup_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.restore_backup( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_restore_backup_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.restore_backup( + backupvault.RestoreBackupRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupplan.CreateBackupPlanRequest, + dict, + ], +) +def test_create_backup_plan(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupplan.CreateBackupPlanRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_backup_plan_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_backup_plan() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.CreateBackupPlanRequest() + + +def test_create_backup_plan_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupplan.CreateBackupPlanRequest( + parent="parent_value", + backup_plan_id="backup_plan_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_backup_plan(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.CreateBackupPlanRequest( + parent="parent_value", + backup_plan_id="backup_plan_id_value", + ) + + +def test_create_backup_plan_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_backup_plan in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_backup_plan + ] = mock_rpc + request = {} + client.create_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_backup_plan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_backup_plan_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_backup_plan() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.CreateBackupPlanRequest() + + +@pytest.mark.asyncio +async def test_create_backup_plan_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_backup_plan + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_backup_plan + ] = mock_rpc + + request = {} + await client.create_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_backup_plan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_backup_plan_async( + transport: str = "grpc_asyncio", request_type=backupplan.CreateBackupPlanRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupplan.CreateBackupPlanRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_backup_plan_async_from_dict(): + await test_create_backup_plan_async(request_type=dict) + + +def test_create_backup_plan_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplan.CreateBackupPlanRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_backup_plan), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_backup_plan_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplan.CreateBackupPlanRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_backup_plan_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_backup_plan( + parent="parent_value", + backup_plan=backupplan.BackupPlan(name="name_value"), + backup_plan_id="backup_plan_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].backup_plan + mock_val = backupplan.BackupPlan(name="name_value") + assert arg == mock_val + arg = args[0].backup_plan_id + mock_val = "backup_plan_id_value" + assert arg == mock_val + + +def test_create_backup_plan_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_backup_plan( + backupplan.CreateBackupPlanRequest(), + parent="parent_value", + backup_plan=backupplan.BackupPlan(name="name_value"), + backup_plan_id="backup_plan_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_backup_plan_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.create_backup_plan( + parent="parent_value", + backup_plan=backupplan.BackupPlan(name="name_value"), + backup_plan_id="backup_plan_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].backup_plan + mock_val = backupplan.BackupPlan(name="name_value") + assert arg == mock_val + arg = args[0].backup_plan_id + mock_val = "backup_plan_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_backup_plan_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_backup_plan( + backupplan.CreateBackupPlanRequest(), + parent="parent_value", + backup_plan=backupplan.BackupPlan(name="name_value"), + backup_plan_id="backup_plan_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupplan.GetBackupPlanRequest, + dict, + ], +) +def test_get_backup_plan(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = backupplan.BackupPlan( + name="name_value", + description="description_value", + state=backupplan.BackupPlan.State.CREATING, + resource_type="resource_type_value", + etag="etag_value", + backup_vault="backup_vault_value", + backup_vault_service_account="backup_vault_service_account_value", + ) + response = client.get_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupplan.GetBackupPlanRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, backupplan.BackupPlan) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == backupplan.BackupPlan.State.CREATING + assert response.resource_type == "resource_type_value" + assert response.etag == "etag_value" + assert response.backup_vault == "backup_vault_value" + assert response.backup_vault_service_account == "backup_vault_service_account_value" + + +def test_get_backup_plan_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_backup_plan() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.GetBackupPlanRequest() + + +def test_get_backup_plan_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupplan.GetBackupPlanRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_backup_plan(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.GetBackupPlanRequest( + name="name_value", + ) + + +def test_get_backup_plan_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_backup_plan in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_backup_plan] = mock_rpc + request = {} + client.get_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_backup_plan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_backup_plan_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplan.BackupPlan( + name="name_value", + description="description_value", + state=backupplan.BackupPlan.State.CREATING, + resource_type="resource_type_value", + etag="etag_value", + backup_vault="backup_vault_value", + backup_vault_service_account="backup_vault_service_account_value", + ) + ) + response = await client.get_backup_plan() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.GetBackupPlanRequest() + + +@pytest.mark.asyncio +async def test_get_backup_plan_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_backup_plan + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped 
function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_backup_plan + ] = mock_rpc + + request = {} + await client.get_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_backup_plan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_backup_plan_async( + transport: str = "grpc_asyncio", request_type=backupplan.GetBackupPlanRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplan.BackupPlan( + name="name_value", + description="description_value", + state=backupplan.BackupPlan.State.CREATING, + resource_type="resource_type_value", + etag="etag_value", + backup_vault="backup_vault_value", + backup_vault_service_account="backup_vault_service_account_value", + ) + ) + response = await client.get_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupplan.GetBackupPlanRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, backupplan.BackupPlan) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == backupplan.BackupPlan.State.CREATING + assert response.resource_type == "resource_type_value" + assert response.etag == "etag_value" + assert response.backup_vault == "backup_vault_value" + assert response.backup_vault_service_account == "backup_vault_service_account_value" + + +@pytest.mark.asyncio +async def test_get_backup_plan_async_from_dict(): + await test_get_backup_plan_async(request_type=dict) + + +def test_get_backup_plan_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplan.GetBackupPlanRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + call.return_value = backupplan.BackupPlan() + client.get_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_backup_plan_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplan.GetBackupPlanRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplan.BackupPlan() + ) + await client.get_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_backup_plan_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupplan.BackupPlan() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_backup_plan( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_backup_plan_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_backup_plan( + backupplan.GetBackupPlanRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_backup_plan_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupplan.BackupPlan() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplan.BackupPlan() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_backup_plan( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_backup_plan_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_backup_plan( + backupplan.GetBackupPlanRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupplan.ListBackupPlansRequest, + dict, + ], +) +def test_list_backup_plans(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = backupplan.ListBackupPlansResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_backup_plans(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupplan.ListBackupPlansRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupPlansPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_backup_plans_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_backup_plans() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.ListBackupPlansRequest() + + +def test_list_backup_plans_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = backupplan.ListBackupPlansRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_backup_plans(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.ListBackupPlansRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_backup_plans_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_backup_plans in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_backup_plans + ] = mock_rpc + request = {} + client.list_backup_plans(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_backup_plans(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_backup_plans_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplan.ListBackupPlansResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_backup_plans() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.ListBackupPlansRequest() + + +@pytest.mark.asyncio +async def test_list_backup_plans_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_backup_plans + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + 
client._client._transport.list_backup_plans + ] = mock_rpc + + request = {} + await client.list_backup_plans(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_backup_plans(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_backup_plans_async( + transport: str = "grpc_asyncio", request_type=backupplan.ListBackupPlansRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplan.ListBackupPlansResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_backup_plans(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupplan.ListBackupPlansRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListBackupPlansAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_backup_plans_async_from_dict(): + await test_list_backup_plans_async(request_type=dict) + + +def test_list_backup_plans_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplan.ListBackupPlansRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + call.return_value = backupplan.ListBackupPlansResponse() + client.list_backup_plans(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_backup_plans_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplan.ListBackupPlansRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplan.ListBackupPlansResponse() + ) + await client.list_backup_plans(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_backup_plans_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupplan.ListBackupPlansResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_backup_plans( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_backup_plans_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_backup_plans( + backupplan.ListBackupPlansRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_backup_plans_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = backupplan.ListBackupPlansResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplan.ListBackupPlansResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_backup_plans( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_backup_plans_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_backup_plans( + backupplan.ListBackupPlansRequest(), + parent="parent_value", + ) + + +def test_list_backup_plans_pager(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + next_page_token="abc", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[], + next_page_token="def", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + ], + next_page_token="ghi", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_backup_plans(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupplan.BackupPlan) for i in results) + + +def test_list_backup_plans_pages(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + next_page_token="abc", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[], + next_page_token="def", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + ], + next_page_token="ghi", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + ), + RuntimeError, + ) + pages = list(client.list_backup_plans(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_backup_plans_async_pager(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + next_page_token="abc", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[], + next_page_token="def", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + ], + next_page_token="ghi", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_backup_plans( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, backupplan.BackupPlan) for i in responses) + + +@pytest.mark.asyncio +async def test_list_backup_plans_async_pages(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + next_page_token="abc", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[], + next_page_token="def", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + ], + next_page_token="ghi", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_backup_plans(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupplan.DeleteBackupPlanRequest, + dict, + ], +) +def test_delete_backup_plan(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupplan.DeleteBackupPlanRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_backup_plan_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_backup_plan() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.DeleteBackupPlanRequest() + + +def test_delete_backup_plan_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupplan.DeleteBackupPlanRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_backup_plan(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.DeleteBackupPlanRequest( + name="name_value", + ) + + +def test_delete_backup_plan_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_backup_plan in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_backup_plan + ] = mock_rpc + request = {} + client.delete_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_backup_plan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_backup_plan_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_backup_plan() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.DeleteBackupPlanRequest() + + +@pytest.mark.asyncio +async def test_delete_backup_plan_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_backup_plan + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_backup_plan + ] = mock_rpc + + request = {} + await client.delete_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_backup_plan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_backup_plan_async( + transport: str = "grpc_asyncio", request_type=backupplan.DeleteBackupPlanRequest +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupplan.DeleteBackupPlanRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_backup_plan_async_from_dict(): + await test_delete_backup_plan_async(request_type=dict) + + +def test_delete_backup_plan_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplan.DeleteBackupPlanRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_backup_plan), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_backup_plan_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplan.DeleteBackupPlanRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_backup_plan_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_backup_plan( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_backup_plan_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_backup_plan( + backupplan.DeleteBackupPlanRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_backup_plan_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_backup_plan( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_backup_plan_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_backup_plan( + backupplan.DeleteBackupPlanRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupplanassociation.CreateBackupPlanAssociationRequest, + dict, + ], +) +def test_create_backup_plan_association(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupplanassociation.CreateBackupPlanAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_backup_plan_association_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan_association), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_backup_plan_association() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.CreateBackupPlanAssociationRequest() + + +def test_create_backup_plan_association_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupplanassociation.CreateBackupPlanAssociationRequest( + parent="parent_value", + backup_plan_association_id="backup_plan_association_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan_association), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_backup_plan_association(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.CreateBackupPlanAssociationRequest( + parent="parent_value", + backup_plan_association_id="backup_plan_association_id_value", + ) + + +def test_create_backup_plan_association_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_backup_plan_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_backup_plan_association + ] = mock_rpc + request = {} + client.create_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_backup_plan_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_backup_plan_association_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_backup_plan_association() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.CreateBackupPlanAssociationRequest() + + +@pytest.mark.asyncio +async def test_create_backup_plan_association_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_backup_plan_association + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_backup_plan_association + ] = mock_rpc + + request = {} + await client.create_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_backup_plan_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_backup_plan_association_async( + transport: str = "grpc_asyncio", + request_type=backupplanassociation.CreateBackupPlanAssociationRequest, +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupplanassociation.CreateBackupPlanAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_backup_plan_association_async_from_dict(): + await test_create_backup_plan_association_async(request_type=dict) + + +def test_create_backup_plan_association_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = backupplanassociation.CreateBackupPlanAssociationRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan_association), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_backup_plan_association_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplanassociation.CreateBackupPlanAssociationRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan_association), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_backup_plan_association_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_backup_plan_association( + parent="parent_value", + backup_plan_association=backupplanassociation.BackupPlanAssociation( + name="name_value" + ), + backup_plan_association_id="backup_plan_association_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].backup_plan_association + mock_val = backupplanassociation.BackupPlanAssociation(name="name_value") + assert arg == mock_val + arg = args[0].backup_plan_association_id + mock_val = "backup_plan_association_id_value" + assert arg == mock_val + + +def test_create_backup_plan_association_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_backup_plan_association( + backupplanassociation.CreateBackupPlanAssociationRequest(), + parent="parent_value", + backup_plan_association=backupplanassociation.BackupPlanAssociation( + name="name_value" + ), + backup_plan_association_id="backup_plan_association_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_backup_plan_association_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_backup_plan_association( + parent="parent_value", + backup_plan_association=backupplanassociation.BackupPlanAssociation( + name="name_value" + ), + backup_plan_association_id="backup_plan_association_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].backup_plan_association + mock_val = backupplanassociation.BackupPlanAssociation(name="name_value") + assert arg == mock_val + arg = args[0].backup_plan_association_id + mock_val = "backup_plan_association_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_backup_plan_association_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_backup_plan_association( + backupplanassociation.CreateBackupPlanAssociationRequest(), + parent="parent_value", + backup_plan_association=backupplanassociation.BackupPlanAssociation( + name="name_value" + ), + backup_plan_association_id="backup_plan_association_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupplanassociation.GetBackupPlanAssociationRequest, + dict, + ], +) +def test_get_backup_plan_association(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = backupplanassociation.BackupPlanAssociation( + name="name_value", + resource_type="resource_type_value", + resource="resource_value", + backup_plan="backup_plan_value", + state=backupplanassociation.BackupPlanAssociation.State.CREATING, + data_source="data_source_value", + ) + response = client.get_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupplanassociation.GetBackupPlanAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, backupplanassociation.BackupPlanAssociation) + assert response.name == "name_value" + assert response.resource_type == "resource_type_value" + assert response.resource == "resource_value" + assert response.backup_plan == "backup_plan_value" + assert response.state == backupplanassociation.BackupPlanAssociation.State.CREATING + assert response.data_source == "data_source_value" + + +def test_get_backup_plan_association_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_plan_association), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_backup_plan_association() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.GetBackupPlanAssociationRequest() + + +def test_get_backup_plan_association_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupplanassociation.GetBackupPlanAssociationRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_plan_association), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_backup_plan_association(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.GetBackupPlanAssociationRequest( + name="name_value", + ) + + +def test_get_backup_plan_association_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_backup_plan_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_backup_plan_association + ] = mock_rpc + request = {} + client.get_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_backup_plan_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_backup_plan_association_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.BackupPlanAssociation( + name="name_value", + resource_type="resource_type_value", + resource="resource_value", + backup_plan="backup_plan_value", + state=backupplanassociation.BackupPlanAssociation.State.CREATING, + data_source="data_source_value", + ) + ) + response = await client.get_backup_plan_association() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.GetBackupPlanAssociationRequest() + + +@pytest.mark.asyncio +async def test_get_backup_plan_association_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_backup_plan_association + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_backup_plan_association + ] = mock_rpc + + request = {} + await client.get_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.get_backup_plan_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_backup_plan_association_async( + transport: str = "grpc_asyncio", + request_type=backupplanassociation.GetBackupPlanAssociationRequest, +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.BackupPlanAssociation( + name="name_value", + resource_type="resource_type_value", + resource="resource_value", + backup_plan="backup_plan_value", + state=backupplanassociation.BackupPlanAssociation.State.CREATING, + data_source="data_source_value", + ) + ) + response = await client.get_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupplanassociation.GetBackupPlanAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, backupplanassociation.BackupPlanAssociation) + assert response.name == "name_value" + assert response.resource_type == "resource_type_value" + assert response.resource == "resource_value" + assert response.backup_plan == "backup_plan_value" + assert response.state == backupplanassociation.BackupPlanAssociation.State.CREATING + assert response.data_source == "data_source_value" + + +@pytest.mark.asyncio +async def test_get_backup_plan_association_async_from_dict(): + await test_get_backup_plan_association_async(request_type=dict) + + +def test_get_backup_plan_association_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplanassociation.GetBackupPlanAssociationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_plan_association), "__call__" + ) as call: + call.return_value = backupplanassociation.BackupPlanAssociation() + client.get_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_backup_plan_association_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = backupplanassociation.GetBackupPlanAssociationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_plan_association), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.BackupPlanAssociation() + ) + await client.get_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_backup_plan_association_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupplanassociation.BackupPlanAssociation() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_backup_plan_association( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_backup_plan_association_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_backup_plan_association( + backupplanassociation.GetBackupPlanAssociationRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_backup_plan_association_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupplanassociation.BackupPlanAssociation() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.BackupPlanAssociation() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_backup_plan_association( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_backup_plan_association_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_backup_plan_association( + backupplanassociation.GetBackupPlanAssociationRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupplanassociation.ListBackupPlanAssociationsRequest, + dict, + ], +) +def test_list_backup_plan_associations(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupplanassociation.ListBackupPlanAssociationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_backup_plan_associations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupplanassociation.ListBackupPlanAssociationsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupPlanAssociationsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_backup_plan_associations_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_backup_plan_associations() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.ListBackupPlanAssociationsRequest() + + +def test_list_backup_plan_associations_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupplanassociation.ListBackupPlanAssociationsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_backup_plan_associations(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.ListBackupPlanAssociationsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + ) + + +def test_list_backup_plan_associations_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_backup_plan_associations + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_backup_plan_associations + ] = mock_rpc + request = {} + client.list_backup_plan_associations(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_backup_plan_associations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_backup_plan_associations_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.ListBackupPlanAssociationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_backup_plan_associations() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.ListBackupPlanAssociationsRequest() + + +@pytest.mark.asyncio +async def test_list_backup_plan_associations_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_backup_plan_associations + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_backup_plan_associations + ] = mock_rpc + + request = {} + await client.list_backup_plan_associations(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.list_backup_plan_associations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_backup_plan_associations_async( + transport: str = "grpc_asyncio", + request_type=backupplanassociation.ListBackupPlanAssociationsRequest, +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.ListBackupPlanAssociationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_backup_plan_associations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupplanassociation.ListBackupPlanAssociationsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListBackupPlanAssociationsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_backup_plan_associations_async_from_dict(): + await test_list_backup_plan_associations_async(request_type=dict) + + +def test_list_backup_plan_associations_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplanassociation.ListBackupPlanAssociationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + call.return_value = backupplanassociation.ListBackupPlanAssociationsResponse() + client.list_backup_plan_associations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_backup_plan_associations_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplanassociation.ListBackupPlanAssociationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.ListBackupPlanAssociationsResponse() + ) + await client.list_backup_plan_associations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_backup_plan_associations_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupplanassociation.ListBackupPlanAssociationsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_backup_plan_associations( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_backup_plan_associations_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_backup_plan_associations( + backupplanassociation.ListBackupPlanAssociationsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_backup_plan_associations_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupplanassociation.ListBackupPlanAssociationsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.ListBackupPlanAssociationsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_backup_plan_associations( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_backup_plan_associations_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_backup_plan_associations( + backupplanassociation.ListBackupPlanAssociationsRequest(), + parent="parent_value", + ) + + +def test_list_backup_plan_associations_pager(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="abc", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[], + next_page_token="def", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="ghi", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_backup_plan_associations( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, backupplanassociation.BackupPlanAssociation) for i in results + ) + + +def test_list_backup_plan_associations_pages(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="abc", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[], + next_page_token="def", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="ghi", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + ), + RuntimeError, + ) + pages = list(client.list_backup_plan_associations(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_backup_plan_associations_async_pager(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="abc", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[], + next_page_token="def", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="ghi", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_backup_plan_associations( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, backupplanassociation.BackupPlanAssociation) + for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_backup_plan_associations_async_pages(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="abc", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[], + next_page_token="def", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="ghi", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_backup_plan_associations(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupplanassociation.DeleteBackupPlanAssociationRequest, + dict, + ], +) +def test_delete_backup_plan_association(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupplanassociation.DeleteBackupPlanAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_backup_plan_association_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan_association), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_backup_plan_association() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.DeleteBackupPlanAssociationRequest() + + +def test_delete_backup_plan_association_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupplanassociation.DeleteBackupPlanAssociationRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_backup_plan_association), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_backup_plan_association(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.DeleteBackupPlanAssociationRequest( + name="name_value", + ) + + +def test_delete_backup_plan_association_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_backup_plan_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_backup_plan_association + ] = mock_rpc + request = {} + client.delete_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_backup_plan_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_backup_plan_association_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_backup_plan_association() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.DeleteBackupPlanAssociationRequest() + + +@pytest.mark.asyncio +async def test_delete_backup_plan_association_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_backup_plan_association + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = 
mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_backup_plan_association + ] = mock_rpc + + request = {} + await client.delete_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_backup_plan_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_backup_plan_association_async( + transport: str = "grpc_asyncio", + request_type=backupplanassociation.DeleteBackupPlanAssociationRequest, +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupplanassociation.DeleteBackupPlanAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_backup_plan_association_async_from_dict(): + await test_delete_backup_plan_association_async(request_type=dict) + + +def test_delete_backup_plan_association_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplanassociation.DeleteBackupPlanAssociationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan_association), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_backup_plan_association_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplanassociation.DeleteBackupPlanAssociationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_backup_plan_association), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_backup_plan_association_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_backup_plan_association( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_backup_plan_association_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_backup_plan_association( + backupplanassociation.DeleteBackupPlanAssociationRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_backup_plan_association_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_backup_plan_association( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_backup_plan_association_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_backup_plan_association( + backupplanassociation.DeleteBackupPlanAssociationRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupplanassociation.TriggerBackupRequest, + dict, + ], +) +def test_trigger_backup(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.trigger_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupplanassociation.TriggerBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_trigger_backup_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.trigger_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.TriggerBackupRequest() + + +def test_trigger_backup_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupplanassociation.TriggerBackupRequest( + name="name_value", + rule_id="rule_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.trigger_backup(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.TriggerBackupRequest( + name="name_value", + rule_id="rule_id_value", + ) + + +def test_trigger_backup_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.trigger_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.trigger_backup] = mock_rpc + request = {} + client.trigger_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.trigger_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_trigger_backup_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.trigger_backup() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.TriggerBackupRequest() + + +@pytest.mark.asyncio +async def test_trigger_backup_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.trigger_backup + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.trigger_backup + ] = mock_rpc + + request = {} + await client.trigger_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.trigger_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_trigger_backup_async( + transport: str = "grpc_asyncio", + request_type=backupplanassociation.TriggerBackupRequest, +): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.trigger_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupplanassociation.TriggerBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_trigger_backup_async_from_dict(): + await test_trigger_backup_async(request_type=dict) + + +def test_trigger_backup_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplanassociation.TriggerBackupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.trigger_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_trigger_backup_field_headers_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplanassociation.TriggerBackupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.trigger_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_trigger_backup_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.trigger_backup( + name="name_value", + rule_id="rule_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].rule_id + mock_val = "rule_id_value" + assert arg == mock_val + + +def test_trigger_backup_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.trigger_backup( + backupplanassociation.TriggerBackupRequest(), + name="name_value", + rule_id="rule_id_value", + ) + + +@pytest.mark.asyncio +async def test_trigger_backup_flattened_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.trigger_backup( + name="name_value", + rule_id="rule_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].rule_id + mock_val = "rule_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_trigger_backup_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.trigger_backup( + backupplanassociation.TriggerBackupRequest(), + name="name_value", + rule_id="rule_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupdr.ListManagementServersRequest, + dict, + ], +) +def test_list_management_servers_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupdr.ListManagementServersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupdr.ListManagementServersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_management_servers(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListManagementServersPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_management_servers_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_management_servers + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_management_servers + ] = mock_rpc + + request = {} + client.list_management_servers(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_management_servers(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_management_servers_rest_required_fields( + request_type=backupdr.ListManagementServersRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_management_servers._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_management_servers._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupdr.ListManagementServersResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupdr.ListManagementServersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_management_servers(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_management_servers_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_management_servers._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_management_servers_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, 
mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BackupDRRestInterceptor, "post_list_management_servers" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_list_management_servers" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupdr.ListManagementServersRequest.pb( + backupdr.ListManagementServersRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = backupdr.ListManagementServersResponse.to_json( + backupdr.ListManagementServersResponse() + ) + + request = backupdr.ListManagementServersRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backupdr.ListManagementServersResponse() + + client.list_management_servers( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_management_servers_rest_bad_request( + transport: str = "rest", request_type=backupdr.ListManagementServersRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_management_servers(request) + + +def test_list_management_servers_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupdr.ListManagementServersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupdr.ListManagementServersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_management_servers(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/managementServers" + % client.transport._host, + args[1], + ) + + +def test_list_management_servers_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_management_servers( + backupdr.ListManagementServersRequest(), + parent="parent_value", + ) + + +def test_list_management_servers_rest_pager(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + backupdr.ListManagementServersResponse( + management_servers=[ + backupdr.ManagementServer(), + backupdr.ManagementServer(), + backupdr.ManagementServer(), + ], + next_page_token="abc", + ), + backupdr.ListManagementServersResponse( + management_servers=[], + next_page_token="def", + ), + backupdr.ListManagementServersResponse( + management_servers=[ + backupdr.ManagementServer(), + ], + next_page_token="ghi", + ), + backupdr.ListManagementServersResponse( + management_servers=[ + backupdr.ManagementServer(), + backupdr.ManagementServer(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + backupdr.ListManagementServersResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_management_servers(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupdr.ManagementServer) for i in results) + + pages = list(client.list_management_servers(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupdr.GetManagementServerRequest, + dict, + ], +) +def test_get_management_server_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/managementServers/sample3" + } + 
request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupdr.ManagementServer( + name="name_value", + description="description_value", + type_=backupdr.ManagementServer.InstanceType.BACKUP_RESTORE, + state=backupdr.ManagementServer.InstanceState.CREATING, + etag="etag_value", + oauth2_client_id="oauth2_client_id_value", + ba_proxy_uri=["ba_proxy_uri_value"], + satisfies_pzi=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupdr.ManagementServer.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_management_server(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, backupdr.ManagementServer) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.type_ == backupdr.ManagementServer.InstanceType.BACKUP_RESTORE + assert response.state == backupdr.ManagementServer.InstanceState.CREATING + assert response.etag == "etag_value" + assert response.oauth2_client_id == "oauth2_client_id_value" + assert response.ba_proxy_uri == ["ba_proxy_uri_value"] + assert response.satisfies_pzi is True + + +def test_get_management_server_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_management_server + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_management_server + ] = mock_rpc + + request = {} + client.get_management_server(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_management_server(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_management_server_rest_required_fields( + request_type=backupdr.GetManagementServerRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_management_server._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_management_server._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupdr.ManagementServer() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupdr.ManagementServer.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_management_server(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_management_server_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_management_server._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_management_server_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BackupDRRestInterceptor, "post_get_management_server" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_get_management_server" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
backupdr.GetManagementServerRequest.pb( + backupdr.GetManagementServerRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = backupdr.ManagementServer.to_json( + backupdr.ManagementServer() + ) + + request = backupdr.GetManagementServerRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backupdr.ManagementServer() + + client.get_management_server( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_management_server_rest_bad_request( + transport: str = "rest", request_type=backupdr.GetManagementServerRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/managementServers/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_management_server(request) + + +def test_get_management_server_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupdr.ManagementServer() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/managementServers/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupdr.ManagementServer.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_management_server(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/managementServers/*}" + % client.transport._host, + args[1], + ) + + +def test_get_management_server_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_management_server( + backupdr.GetManagementServerRequest(), + name="name_value", + ) + + +def test_get_management_server_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupdr.CreateManagementServerRequest, + dict, + ], +) +def test_create_management_server_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["management_server"] = { + "name": "name_value", + "description": "description_value", + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "type_": 1, + "management_uri": {"web_ui": "web_ui_value", "api": "api_value"}, + "workforce_identity_based_management_uri": { + "first_party_management_uri": "first_party_management_uri_value", + "third_party_management_uri": "third_party_management_uri_value", + }, + "state": 1, + "networks": [{"network": "network_value", "peering_mode": 1}], + "etag": "etag_value", + "oauth2_client_id": "oauth2_client_id_value", + "workforce_identity_based_oauth2_client_id": { + "first_party_oauth2_client_id": "first_party_oauth2_client_id_value", + "third_party_oauth2_client_id": "third_party_oauth2_client_id_value", + }, + "ba_proxy_uri": ["ba_proxy_uri_value1", "ba_proxy_uri_value2"], + "satisfies_pzs": {"value": True}, + "satisfies_pzi": True, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = backupdr.CreateManagementServerRequest.meta.fields["management_server"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["management_server"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version 
of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["management_server"][field])): + del request_init["management_server"][field][i][subfield] + else: + del request_init["management_server"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_management_server(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_management_server_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_management_server + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_management_server + ] = mock_rpc + + request = {} + client.create_management_server(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_management_server(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_management_server_rest_required_fields( + request_type=backupdr.CreateManagementServerRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["management_server_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "managementServerId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_management_server._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "managementServerId" in jsonified_request + assert ( + jsonified_request["managementServerId"] == request_init["management_server_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["managementServerId"] = "management_server_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_management_server._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "management_server_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "managementServerId" in jsonified_request + assert jsonified_request["managementServerId"] == "management_server_id_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_management_server(request) + + expected_params = [ + ( + "managementServerId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_management_server_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_management_server._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "managementServerId", + "requestId", + ) + ) + & set( + ( + "parent", + "managementServerId", + "managementServer", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_management_server_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_create_management_server" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_create_management_server" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + 
pb_message = backupdr.CreateManagementServerRequest.pb( + backupdr.CreateManagementServerRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = backupdr.CreateManagementServerRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_management_server( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_management_server_rest_bad_request( + transport: str = "rest", request_type=backupdr.CreateManagementServerRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_management_server(request) + + +def test_create_management_server_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + management_server=backupdr.ManagementServer(name="name_value"), + management_server_id="management_server_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_management_server(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/managementServers" + % client.transport._host, + args[1], + ) + + +def test_create_management_server_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_management_server( + backupdr.CreateManagementServerRequest(), + parent="parent_value", + management_server=backupdr.ManagementServer(name="name_value"), + management_server_id="management_server_id_value", + ) + + +def test_create_management_server_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupdr.DeleteManagementServerRequest, + dict, + ], +) +def test_delete_management_server_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/managementServers/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_management_server(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_management_server_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_management_server + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_management_server + ] = mock_rpc + + request = {} + client.delete_management_server(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_management_server(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_management_server_rest_required_fields( + request_type=backupdr.DeleteManagementServerRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_management_server._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_management_server._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_management_server(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_management_server_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_management_server._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_management_server_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + 
transports.BackupDRRestInterceptor, "post_delete_management_server" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_delete_management_server" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupdr.DeleteManagementServerRequest.pb( + backupdr.DeleteManagementServerRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = backupdr.DeleteManagementServerRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_management_server( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_management_server_rest_bad_request( + transport: str = "rest", request_type=backupdr.DeleteManagementServerRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/managementServers/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_management_server(request) + + +def test_delete_management_server_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/managementServers/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_management_server(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/managementServers/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_management_server_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_management_server( + backupdr.DeleteManagementServerRequest(), + name="name_value", + ) + + +def test_delete_management_server_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.CreateBackupVaultRequest, + dict, + ], +) +def test_create_backup_vault_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["backup_vault"] = { + "name": "name_value", + "description": "description_value", + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "backup_minimum_enforced_retention_duration": {"seconds": 751, "nanos": 543}, + "deletable": True, + "etag": "etag_value", + "state": 1, + "effective_time": {}, + "backup_count": 1278, + "service_account": "service_account_value", + "total_stored_bytes": 1946, + "uid": "uid_value", + "annotations": {}, + "access_restriction": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = backupvault.CreateBackupVaultRequest.meta.fields["backup_vault"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["backup_vault"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup_vault"][field])): + del request_init["backup_vault"][field][i][subfield] + else: + del 
request_init["backup_vault"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_backup_vault(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_backup_vault_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_backup_vault in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_backup_vault + ] = mock_rpc + + request = {} + client.create_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_backup_vault(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_backup_vault_rest_required_fields( + request_type=backupvault.CreateBackupVaultRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["backup_vault_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "backupVaultId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_backup_vault._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "backupVaultId" in jsonified_request + assert jsonified_request["backupVaultId"] == request_init["backup_vault_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["backupVaultId"] = "backup_vault_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_backup_vault._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "backup_vault_id", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "backupVaultId" in jsonified_request + assert jsonified_request["backupVaultId"] == "backup_vault_id_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_backup_vault(request) + + expected_params = [ + ( + "backupVaultId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_backup_vault_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_backup_vault._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "backupVaultId", + "requestId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "backupVaultId", + "backupVault", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_backup_vault_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_create_backup_vault" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_create_backup_vault" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
backupvault.CreateBackupVaultRequest.pb( + backupvault.CreateBackupVaultRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = backupvault.CreateBackupVaultRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_backup_vault( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_backup_vault_rest_bad_request( + transport: str = "rest", request_type=backupvault.CreateBackupVaultRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_backup_vault(request) + + +def test_create_backup_vault_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + backup_vault=backupvault.BackupVault(name="name_value"), + backup_vault_id="backup_vault_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_backup_vault(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/backupVaults" + % client.transport._host, + args[1], + ) + + +def test_create_backup_vault_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_backup_vault( + backupvault.CreateBackupVaultRequest(), + parent="parent_value", + backup_vault=backupvault.BackupVault(name="name_value"), + backup_vault_id="backup_vault_id_value", + ) + + +def test_create_backup_vault_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.ListBackupVaultsRequest, + dict, + ], +) +def test_list_backup_vaults_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupvault.ListBackupVaultsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.ListBackupVaultsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_backup_vaults(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListBackupVaultsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_backup_vaults_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_backup_vaults in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_backup_vaults + ] = mock_rpc + + request = {} + client.list_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_backup_vaults(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_backup_vaults_rest_required_fields( + request_type=backupvault.ListBackupVaultsRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_backup_vaults._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_backup_vaults._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + "view", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupvault.ListBackupVaultsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupvault.ListBackupVaultsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_backup_vaults(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_backup_vaults_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_backup_vaults._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + "view", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_backup_vaults_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + 
path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BackupDRRestInterceptor, "post_list_backup_vaults" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_list_backup_vaults" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupvault.ListBackupVaultsRequest.pb( + backupvault.ListBackupVaultsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = backupvault.ListBackupVaultsResponse.to_json( + backupvault.ListBackupVaultsResponse() + ) + + request = backupvault.ListBackupVaultsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backupvault.ListBackupVaultsResponse() + + client.list_backup_vaults( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_backup_vaults_rest_bad_request( + transport: str = "rest", request_type=backupvault.ListBackupVaultsRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_backup_vaults(request) + + +def test_list_backup_vaults_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupvault.ListBackupVaultsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.ListBackupVaultsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_backup_vaults(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/backupVaults" + % client.transport._host, + args[1], + ) + + +def test_list_backup_vaults_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_backup_vaults( + backupvault.ListBackupVaultsRequest(), + parent="parent_value", + ) + + +def test_list_backup_vaults_rest_pager(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + next_page_token="abc", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[], + next_page_token="def", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + ], + next_page_token="ghi", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + backupvault.ListBackupVaultsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = 
response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_backup_vaults(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupvault.BackupVault) for i in results) + + pages = list(client.list_backup_vaults(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.FetchUsableBackupVaultsRequest, + dict, + ], +) +def test_fetch_usable_backup_vaults_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupvault.FetchUsableBackupVaultsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.FetchUsableBackupVaultsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.fetch_usable_backup_vaults(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.FetchUsableBackupVaultsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_fetch_usable_backup_vaults_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.fetch_usable_backup_vaults + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.fetch_usable_backup_vaults + ] = mock_rpc + + request = {} + client.fetch_usable_backup_vaults(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.fetch_usable_backup_vaults(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_fetch_usable_backup_vaults_rest_required_fields( + request_type=backupvault.FetchUsableBackupVaultsRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).fetch_usable_backup_vaults._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).fetch_usable_backup_vaults._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupvault.FetchUsableBackupVaultsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupvault.FetchUsableBackupVaultsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.fetch_usable_backup_vaults(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_fetch_usable_backup_vaults_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.fetch_usable_backup_vaults._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_fetch_usable_backup_vaults_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) 
as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BackupDRRestInterceptor, "post_fetch_usable_backup_vaults" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_fetch_usable_backup_vaults" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupvault.FetchUsableBackupVaultsRequest.pb( + backupvault.FetchUsableBackupVaultsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = backupvault.FetchUsableBackupVaultsResponse.to_json( + backupvault.FetchUsableBackupVaultsResponse() + ) + + request = backupvault.FetchUsableBackupVaultsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backupvault.FetchUsableBackupVaultsResponse() + + client.fetch_usable_backup_vaults( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_fetch_usable_backup_vaults_rest_bad_request( + transport: str = "rest", request_type=backupvault.FetchUsableBackupVaultsRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.fetch_usable_backup_vaults(request) + + +def test_fetch_usable_backup_vaults_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupvault.FetchUsableBackupVaultsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.FetchUsableBackupVaultsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.fetch_usable_backup_vaults(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/backupVaults:fetchUsable" + % client.transport._host, + args[1], + ) + + +def test_fetch_usable_backup_vaults_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.fetch_usable_backup_vaults( + backupvault.FetchUsableBackupVaultsRequest(), + parent="parent_value", + ) + + +def test_fetch_usable_backup_vaults_rest_pager(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + next_page_token="abc", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[], + next_page_token="def", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + ], + next_page_token="ghi", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + backupvault.FetchUsableBackupVaultsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.fetch_usable_backup_vaults(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupvault.BackupVault) for i in results) + + pages = list(client.fetch_usable_backup_vaults(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.GetBackupVaultRequest, + dict, + ], +) +def test_get_backup_vault_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backupVaults/sample3"} + request = 
request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupvault.BackupVault( + name="name_value", + description="description_value", + deletable=True, + etag="etag_value", + state=backupvault.BackupVault.State.CREATING, + backup_count=1278, + service_account="service_account_value", + total_stored_bytes=1946, + uid="uid_value", + access_restriction=backupvault.BackupVault.AccessRestriction.WITHIN_PROJECT, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.BackupVault.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_backup_vault(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, backupvault.BackupVault) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.deletable is True + assert response.etag == "etag_value" + assert response.state == backupvault.BackupVault.State.CREATING + assert response.backup_count == 1278 + assert response.service_account == "service_account_value" + assert response.total_stored_bytes == 1946 + assert response.uid == "uid_value" + assert ( + response.access_restriction + == backupvault.BackupVault.AccessRestriction.WITHIN_PROJECT + ) + + +def test_get_backup_vault_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_backup_vault in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_backup_vault + ] = mock_rpc + + request = {} + client.get_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_backup_vault(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_backup_vault_rest_required_fields( + request_type=backupvault.GetBackupVaultRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backup_vault._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backup_vault._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("view",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupvault.BackupVault() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupvault.BackupVault.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_backup_vault(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_backup_vault_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_backup_vault._get_unset_required_fields({}) + assert set(unset_fields) == (set(("view",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_backup_vault_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BackupDRRestInterceptor, "post_get_backup_vault" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_get_backup_vault" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
backupvault.GetBackupVaultRequest.pb( + backupvault.GetBackupVaultRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = backupvault.BackupVault.to_json( + backupvault.BackupVault() + ) + + request = backupvault.GetBackupVaultRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backupvault.BackupVault() + + client.get_backup_vault( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_backup_vault_rest_bad_request( + transport: str = "rest", request_type=backupvault.GetBackupVaultRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backupVaults/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_backup_vault(request) + + +def test_get_backup_vault_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = backupvault.BackupVault() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.BackupVault.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_backup_vault(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/backupVaults/*}" + % client.transport._host, + args[1], + ) + + +def test_get_backup_vault_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_backup_vault( + backupvault.GetBackupVaultRequest(), + name="name_value", + ) + + +def test_get_backup_vault_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.UpdateBackupVaultRequest, + dict, + ], +) +def test_update_backup_vault_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "backup_vault": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3" + } + } + request_init["backup_vault"] = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3", + "description": "description_value", + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "backup_minimum_enforced_retention_duration": {"seconds": 751, "nanos": 543}, + "deletable": True, + "etag": "etag_value", + "state": 1, + "effective_time": {}, + "backup_count": 1278, + "service_account": "service_account_value", + "total_stored_bytes": 1946, + "uid": "uid_value", + "annotations": {}, + "access_restriction": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = backupvault.UpdateBackupVaultRequest.meta.fields["backup_vault"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["backup_vault"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup_vault"][field])): + del request_init["backup_vault"][field][i][subfield] + else: + del 
request_init["backup_vault"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_backup_vault(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_backup_vault_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_backup_vault in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_backup_vault + ] = mock_rpc + + request = {} + client.update_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_backup_vault(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_backup_vault_rest_required_fields( + request_type=backupvault.UpdateBackupVaultRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_backup_vault._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_backup_vault._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "force", + "request_id", + "update_mask", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_backup_vault(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_backup_vault_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_backup_vault._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "force", + "requestId", + "updateMask", + "validateOnly", + ) + ) + & set( + ( + "updateMask", + "backupVault", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_backup_vault_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as 
transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_update_backup_vault" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_update_backup_vault" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupvault.UpdateBackupVaultRequest.pb( + backupvault.UpdateBackupVaultRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = backupvault.UpdateBackupVaultRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_backup_vault( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_backup_vault_rest_bad_request( + transport: str = "rest", request_type=backupvault.UpdateBackupVaultRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "backup_vault": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_backup_vault(request) + + +def test_update_backup_vault_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "backup_vault": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + backup_vault=backupvault.BackupVault(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_backup_vault(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{backup_vault.name=projects/*/locations/*/backupVaults/*}" + % client.transport._host, + args[1], + ) + + +def test_update_backup_vault_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_backup_vault( + backupvault.UpdateBackupVaultRequest(), + backup_vault=backupvault.BackupVault(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_backup_vault_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.DeleteBackupVaultRequest, + dict, + ], +) +def test_delete_backup_vault_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backupVaults/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_backup_vault(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_backup_vault_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_backup_vault in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_backup_vault + ] = mock_rpc + + request = {} + client.delete_backup_vault(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_backup_vault(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_backup_vault_rest_required_fields( + request_type=backupvault.DeleteBackupVaultRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_backup_vault._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_backup_vault._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "allow_missing", + "etag", + "force", + "request_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_backup_vault(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_backup_vault_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_backup_vault._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "etag", + "force", + "requestId", + "validateOnly", + ) + ) + & set(("name",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_backup_vault_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" 
+ ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_delete_backup_vault" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_delete_backup_vault" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupvault.DeleteBackupVaultRequest.pb( + backupvault.DeleteBackupVaultRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = backupvault.DeleteBackupVaultRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_backup_vault( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_backup_vault_rest_bad_request( + transport: str = "rest", request_type=backupvault.DeleteBackupVaultRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backupVaults/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_backup_vault(request) + + +def test_delete_backup_vault_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_backup_vault(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/backupVaults/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_backup_vault_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_backup_vault( + backupvault.DeleteBackupVaultRequest(), + name="name_value", + ) + + +def test_delete_backup_vault_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.ListDataSourcesRequest, + dict, + ], +) +def test_list_data_sources_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/backupVaults/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupvault.ListDataSourcesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.ListDataSourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_data_sources(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListDataSourcesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_data_sources_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_data_sources in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_data_sources + ] = mock_rpc + + request = {} + client.list_data_sources(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_data_sources(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_data_sources_rest_required_fields( + request_type=backupvault.ListDataSourcesRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_data_sources._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_data_sources._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupvault.ListDataSourcesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupvault.ListDataSourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_data_sources(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_data_sources_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_data_sources._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_data_sources_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, 
"transcode" + ) as transcode, mock.patch.object( + transports.BackupDRRestInterceptor, "post_list_data_sources" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_list_data_sources" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupvault.ListDataSourcesRequest.pb( + backupvault.ListDataSourcesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = backupvault.ListDataSourcesResponse.to_json( + backupvault.ListDataSourcesResponse() + ) + + request = backupvault.ListDataSourcesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backupvault.ListDataSourcesResponse() + + client.list_data_sources( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_data_sources_rest_bad_request( + transport: str = "rest", request_type=backupvault.ListDataSourcesRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/backupVaults/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_data_sources(request) + + +def test_list_data_sources_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupvault.ListDataSourcesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/backupVaults/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.ListDataSourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_data_sources(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/backupVaults/*}/dataSources" + % client.transport._host, + args[1], + ) + + +def test_list_data_sources_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_data_sources( + backupvault.ListDataSourcesRequest(), + parent="parent_value", + ) + + +def test_list_data_sources_rest_pager(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + backupvault.DataSource(), + backupvault.DataSource(), + ], + next_page_token="abc", + ), + backupvault.ListDataSourcesResponse( + data_sources=[], + next_page_token="def", + ), + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + ], + next_page_token="ghi", + ), + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + backupvault.DataSource(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + backupvault.ListDataSourcesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/backupVaults/sample3" + } + + pager = client.list_data_sources(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupvault.DataSource) for i in results) + + pages = list(client.list_data_sources(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.GetDataSourceRequest, + dict, + ], +) +def test_get_data_source_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } + request = request_type(**request_init) + + 
# Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupvault.DataSource( + name="name_value", + state=backupvault.DataSource.State.CREATING, + backup_count=1278, + etag="etag_value", + total_stored_bytes=1946, + config_state=backupvault.BackupConfigState.ACTIVE, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.DataSource.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_data_source(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, backupvault.DataSource) + assert response.name == "name_value" + assert response.state == backupvault.DataSource.State.CREATING + assert response.backup_count == 1278 + assert response.etag == "etag_value" + assert response.total_stored_bytes == 1946 + assert response.config_state == backupvault.BackupConfigState.ACTIVE + + +def test_get_data_source_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_data_source in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # 
operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_data_source] = mock_rpc + + request = {} + client.get_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_data_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_data_source_rest_required_fields( + request_type=backupvault.GetDataSourceRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_data_source._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_data_source._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupvault.DataSource() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupvault.DataSource.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_data_source(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_data_source_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_data_source._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_data_source_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BackupDRRestInterceptor, 
"post_get_data_source" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_get_data_source" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupvault.GetDataSourceRequest.pb( + backupvault.GetDataSourceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = backupvault.DataSource.to_json( + backupvault.DataSource() + ) + + request = backupvault.GetDataSourceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backupvault.DataSource() + + client.get_data_source( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_data_source_rest_bad_request( + transport: str = "rest", request_type=backupvault.GetDataSourceRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_data_source(request) + + +def test_get_data_source_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupvault.DataSource() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.DataSource.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_data_source(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*}" + % client.transport._host, + args[1], + ) + + +def test_get_data_source_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_data_source( + backupvault.GetDataSourceRequest(), + name="name_value", + ) + + +def test_get_data_source_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.UpdateDataSourceRequest, + dict, + ], +) +def test_update_data_source_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "data_source": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } + } + request_init["data_source"] = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4", + "state": 1, + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "backup_count": 1278, + "etag": "etag_value", + "total_stored_bytes": 1946, + "config_state": 1, + "backup_config_info": { + "last_backup_state": 1, + "last_successful_backup_consistency_time": {}, + "last_backup_error": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + }, + "gcp_backup_config": { + "backup_plan": "backup_plan_value", + "backup_plan_description": "backup_plan_description_value", + "backup_plan_association": 
"backup_plan_association_value", + "backup_plan_rules": [ + "backup_plan_rules_value1", + "backup_plan_rules_value2", + ], + }, + "backup_appliance_backup_config": { + "backup_appliance_name": "backup_appliance_name_value", + "backup_appliance_id": 1966, + "sla_id": 620, + "application_name": "application_name_value", + "host_name": "host_name_value", + "slt_name": "slt_name_value", + "slp_name": "slp_name_value", + }, + }, + "data_source_gcp_resource": { + "gcp_resourcename": "gcp_resourcename_value", + "location": "location_value", + "type_": "type__value", + "compute_instance_datasource_properties": { + "name": "name_value", + "description": "description_value", + "machine_type": "machine_type_value", + "total_disk_count": 1718, + "total_disk_size_gb": 1904, + }, + }, + "data_source_backup_appliance_application": { + "application_name": "application_name_value", + "backup_appliance": "backup_appliance_value", + "appliance_id": 1241, + "type_": "type__value", + "application_id": 1472, + "hostname": "hostname_value", + "host_id": 746, + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = backupvault.UpdateDataSourceRequest.meta.fields["data_source"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["data_source"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["data_source"][field])): + del request_init["data_source"][field][i][subfield] + else: + del 
request_init["data_source"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_data_source(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_data_source_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_data_source in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_data_source + ] = mock_rpc + + request = {} + client.update_data_source(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_data_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_data_source_rest_required_fields( + request_type=backupvault.UpdateDataSourceRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_data_source._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_data_source._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "allow_missing", + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_data_source(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_data_source_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_data_source._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "requestId", + "updateMask", + ) + ) + & set( + ( + "updateMask", + "dataSource", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_data_source_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, 
mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_update_data_source" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_update_data_source" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupvault.UpdateDataSourceRequest.pb( + backupvault.UpdateDataSourceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = backupvault.UpdateDataSourceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_data_source( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_data_source_rest_bad_request( + transport: str = "rest", request_type=backupvault.UpdateDataSourceRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "data_source": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_data_source(request) + + +def test_update_data_source_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "data_source": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } + } + + # get truthy value for each flattened field + mock_args = dict( + data_source=backupvault.DataSource(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_data_source(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{data_source.name=projects/*/locations/*/backupVaults/*/dataSources/*}" + % client.transport._host, + args[1], + ) + + +def test_update_data_source_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_data_source( + backupvault.UpdateDataSourceRequest(), + data_source=backupvault.DataSource(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_data_source_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.ListBackupsRequest, + dict, + ], +) +def test_list_backups_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = backupvault.ListBackupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.ListBackupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_backups(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_backups_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_backups in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_backups] = mock_rpc + + request = {} + client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_backups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_backups_rest_required_fields(request_type=backupvault.ListBackupsRequest): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_backups._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_backups._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + "view", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupvault.ListBackupsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupvault.ListBackupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_backups(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_backups_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_backups._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + "view", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_backups_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) 
as transcode, mock.patch.object( + transports.BackupDRRestInterceptor, "post_list_backups" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_list_backups" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupvault.ListBackupsRequest.pb(backupvault.ListBackupsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = backupvault.ListBackupsResponse.to_json( + backupvault.ListBackupsResponse() + ) + + request = backupvault.ListBackupsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backupvault.ListBackupsResponse() + + client.list_backups( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_backups_rest_bad_request( + transport: str = "rest", request_type=backupvault.ListBackupsRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_backups(request) + + +def test_list_backups_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupvault.ListBackupsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.ListBackupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_backups(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/backupVaults/*/dataSources/*}/backups" + % client.transport._host, + args[1], + ) + + +def test_list_backups_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_backups( + backupvault.ListBackupsRequest(), + parent="parent_value", + ) + + +def test_list_backups_rest_pager(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + backupvault.Backup(), + ], + next_page_token="abc", + ), + backupvault.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + ], + next_page_token="ghi", + ), + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(backupvault.ListBackupsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + 
+ sample_request = { + "parent": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } + + pager = client.list_backups(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupvault.Backup) for i in results) + + pages = list(client.list_backups(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.GetBackupRequest, + dict, + ], +) +def test_get_backup_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupvault.Backup( + name="name_value", + description="description_value", + etag="etag_value", + state=backupvault.Backup.State.CREATING, + backup_type=backupvault.Backup.BackupType.SCHEDULED, + resource_size_bytes=2056, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.Backup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_backup(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, backupvault.Backup) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.etag == "etag_value" + assert response.state == backupvault.Backup.State.CREATING + assert response.backup_type == backupvault.Backup.BackupType.SCHEDULED + assert response.resource_size_bytes == 2056 + + +def test_get_backup_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_backup] = mock_rpc + + request = {} + client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_backup_rest_required_fields(request_type=backupvault.GetBackupRequest): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backup._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("view",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupvault.Backup() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupvault.Backup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_backup(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_backup_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_backup._get_unset_required_fields({}) + assert set(unset_fields) == (set(("view",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_backup_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BackupDRRestInterceptor, "post_get_backup" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_get_backup" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupvault.GetBackupRequest.pb(backupvault.GetBackupRequest()) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = backupvault.Backup.to_json(backupvault.Backup()) + + request = backupvault.GetBackupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backupvault.Backup() + + client.get_backup( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_backup_rest_bad_request( + transport: str = "rest", request_type=backupvault.GetBackupRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_backup(request) + + +def test_get_backup_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = backupvault.Backup() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.Backup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_backup(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}" + % client.transport._host, + args[1], + ) + + +def test_get_backup_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_backup( + backupvault.GetBackupRequest(), + name="name_value", + ) + + +def test_get_backup_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.UpdateBackupRequest, + dict, + ], +) +def test_update_backup_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "backup": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } + } + request_init["backup"] = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5", + "description": "description_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "enforced_retention_end_time": {}, + "expire_time": {}, + "consistency_time": {}, + "etag": "etag_value", + "state": 1, + "service_locks": [ + { + "lock_until_time": {}, + "backup_appliance_lock_info": { + "backup_appliance_id": 1966, + "backup_appliance_name": "backup_appliance_name_value", + "lock_reason": "lock_reason_value", + "job_name": "job_name_value", + "backup_image": "backup_image_value", + "sla_id": 620, + }, + "service_lock_info": {"operation": "operation_value"}, + } + ], + "backup_appliance_locks": {}, + "compute_instance_backup_properties": { + "description": "description_value", + "tags": {"items": ["items_value1", "items_value2"]}, + "machine_type": "machine_type_value", + "can_ip_forward": True, + "network_interface": [ + { + "network": "network_value", + "subnetwork": "subnetwork_value", + "ip_address": "ip_address_value", + "ipv6_address": "ipv6_address_value", + "internal_ipv6_prefix_length": 2831, + "name": "name_value", + "access_configs": [ + { + "type_": 1, + "name": "name_value", + 
"external_ip": "external_ip_value", + "external_ipv6": "external_ipv6_value", + "external_ipv6_prefix_length": 2837, + "set_public_ptr": True, + "public_ptr_domain_name": "public_ptr_domain_name_value", + "network_tier": 1, + } + ], + "ipv6_access_configs": {}, + "alias_ip_ranges": [ + { + "ip_cidr_range": "ip_cidr_range_value", + "subnetwork_range_name": "subnetwork_range_name_value", + } + ], + "stack_type": 1, + "ipv6_access_type": 1, + "queue_count": 1197, + "nic_type": 1, + "network_attachment": "network_attachment_value", + } + ], + "disk": [ + { + "initialize_params": { + "disk_name": "disk_name_value", + "replica_zones": [ + "replica_zones_value1", + "replica_zones_value2", + ], + }, + "device_name": "device_name_value", + "kind": "kind_value", + "disk_type_deprecated": 1, + "mode": 1, + "source": "source_value", + "index": 536, + "boot": True, + "auto_delete": True, + "license_": ["license__value1", "license__value2"], + "disk_interface": 1, + "guest_os_feature": [{"type_": 1}], + "disk_encryption_key": { + "raw_key": "raw_key_value", + "rsa_encrypted_key": "rsa_encrypted_key_value", + "kms_key_name": "kms_key_name_value", + "kms_key_service_account": "kms_key_service_account_value", + }, + "disk_size_gb": 1261, + "saved_state": 1, + "disk_type": "disk_type_value", + "type_": 1, + } + ], + "metadata": {"items": [{"key": "key_value", "value": "value_value"}]}, + "service_account": [ + {"email": "email_value", "scopes": ["scopes_value1", "scopes_value2"]} + ], + "scheduling": { + "on_host_maintenance": 1, + "automatic_restart": True, + "preemptible": True, + "node_affinities": [ + { + "key": "key_value", + "operator": 1, + "values": ["values_value1", "values_value2"], + } + ], + "min_node_cpus": 1379, + "provisioning_model": 1, + "instance_termination_action": 1, + "local_ssd_recovery_timeout": {"seconds": 751, "nanos": 543}, + }, + "guest_accelerator": [ + { + "accelerator_type": "accelerator_type_value", + "accelerator_count": 1805, + } + ], + 
"min_cpu_platform": "min_cpu_platform_value", + "key_revocation_action_type": 1, + "source_instance": "source_instance_value", + "labels": {}, + }, + "backup_appliance_backup_properties": { + "generation_id": 1368, + "finalize_time": {}, + "recovery_range_start_time": {}, + "recovery_range_end_time": {}, + }, + "backup_type": 1, + "gcp_backup_plan_info": { + "backup_plan": "backup_plan_value", + "backup_plan_rule_id": "backup_plan_rule_id_value", + }, + "resource_size_bytes": 2056, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = backupvault.UpdateBackupRequest.meta.fields["backup"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["backup"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup"][field])): + del request_init["backup"][field][i][subfield] + else: + del 
request_init["backup"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_backup(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_backup_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_backup] = mock_rpc + + request = {} + client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_backup_rest_required_fields( + request_type=backupvault.UpdateBackupRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_backup._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_backup(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_backup_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_backup._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set( + ( + "updateMask", + "backup", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_backup_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, 
"_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_update_backup" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_update_backup" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupvault.UpdateBackupRequest.pb( + backupvault.UpdateBackupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = backupvault.UpdateBackupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_backup( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_backup_rest_bad_request( + transport: str = "rest", request_type=backupvault.UpdateBackupRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "backup": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_backup(request) + + +def test_update_backup_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "backup": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } + } + + # get truthy value for each flattened field + mock_args = dict( + backup=backupvault.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_backup(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{backup.name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}" + % client.transport._host, + args[1], + ) + + +def test_update_backup_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_backup( + backupvault.UpdateBackupRequest(), + backup=backupvault.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_backup_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.DeleteBackupRequest, + dict, + ], +) +def test_delete_backup_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_backup(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_backup_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_backup] = mock_rpc + + request = {} + client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_backup_rest_required_fields( + request_type=backupvault.DeleteBackupRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_backup._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_backup(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_backup_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_backup._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_backup_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_delete_backup" + ) 
as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_delete_backup" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupvault.DeleteBackupRequest.pb( + backupvault.DeleteBackupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = backupvault.DeleteBackupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_backup( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_backup_rest_bad_request( + transport: str = "rest", request_type=backupvault.DeleteBackupRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_backup(request) + + +def test_delete_backup_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_backup(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_backup_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_backup( + backupvault.DeleteBackupRequest(), + name="name_value", + ) + + +def test_delete_backup_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.RestoreBackupRequest, + dict, + ], +) +def test_restore_backup_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.restore_backup(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_restore_backup_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.restore_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.restore_backup] = mock_rpc + + request = {} + client.restore_backup(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.restore_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_restore_backup_rest_required_fields( + request_type=backupvault.RestoreBackupRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).restore_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).restore_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.restore_backup(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_restore_backup_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.restore_backup._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_restore_backup_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + 
transports.BackupDRRestInterceptor, "post_restore_backup" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_restore_backup" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupvault.RestoreBackupRequest.pb( + backupvault.RestoreBackupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = backupvault.RestoreBackupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.restore_backup( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_restore_backup_rest_bad_request( + transport: str = "rest", request_type=backupvault.RestoreBackupRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.restore_backup(request) + + +def test_restore_backup_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.restore_backup(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}:restore" + % client.transport._host, + args[1], + ) + + +def test_restore_backup_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.restore_backup( + backupvault.RestoreBackupRequest(), + name="name_value", + ) + + +def test_restore_backup_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupplan.CreateBackupPlanRequest, + dict, + ], +) +def test_create_backup_plan_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["backup_plan"] = { + "name": "name_value", + "description": "description_value", + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "backup_rules": [ + { + "rule_id": "rule_id_value", + "backup_retention_days": 2237, + "standard_schedule": { + "recurrence_type": 1, + "hourly_frequency": 1748, + "days_of_week": [1], + "days_of_month": [1387, 1388], + "week_day_of_month": {"week_of_month": 1, "day_of_week": 1}, + "months": [1], + "backup_window": { + "start_hour_of_day": 1820, + "end_hour_of_day": 1573, + }, + "time_zone": "time_zone_value", + }, + } + ], + "state": 1, + "resource_type": "resource_type_value", + "etag": "etag_value", + "backup_vault": "backup_vault_value", + "backup_vault_service_account": "backup_vault_service_account_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = backupplan.CreateBackupPlanRequest.meta.fields["backup_plan"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. 
+ # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["backup_plan"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup_plan"][field])): + del 
request_init["backup_plan"][field][i][subfield] + else: + del request_init["backup_plan"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_backup_plan(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_backup_plan_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_backup_plan in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_backup_plan + ] = mock_rpc + + request = {} + client.create_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_backup_plan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_backup_plan_rest_required_fields( + request_type=backupplan.CreateBackupPlanRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["backup_plan_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "backupPlanId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_backup_plan._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "backupPlanId" in jsonified_request + assert jsonified_request["backupPlanId"] == request_init["backup_plan_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["backupPlanId"] = "backup_plan_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_backup_plan._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "backup_plan_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "backupPlanId" in jsonified_request + assert jsonified_request["backupPlanId"] == "backup_plan_id_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_backup_plan(request) + + expected_params = [ + ( + "backupPlanId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_backup_plan_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_backup_plan._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "backupPlanId", + "requestId", + ) + ) + & set( + ( + "parent", + "backupPlanId", + "backupPlan", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_backup_plan_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_create_backup_plan" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_create_backup_plan" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupplan.CreateBackupPlanRequest.pb( + 
backupplan.CreateBackupPlanRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = backupplan.CreateBackupPlanRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_backup_plan( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_backup_plan_rest_bad_request( + transport: str = "rest", request_type=backupplan.CreateBackupPlanRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_backup_plan(request) + + +def test_create_backup_plan_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + backup_plan=backupplan.BackupPlan(name="name_value"), + backup_plan_id="backup_plan_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_backup_plan(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/backupPlans" + % client.transport._host, + args[1], + ) + + +def test_create_backup_plan_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_backup_plan( + backupplan.CreateBackupPlanRequest(), + parent="parent_value", + backup_plan=backupplan.BackupPlan(name="name_value"), + backup_plan_id="backup_plan_id_value", + ) + + +def test_create_backup_plan_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupplan.GetBackupPlanRequest, + dict, + ], +) +def test_get_backup_plan_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backupPlans/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupplan.BackupPlan( + name="name_value", + description="description_value", + state=backupplan.BackupPlan.State.CREATING, + resource_type="resource_type_value", + etag="etag_value", + backup_vault="backup_vault_value", + backup_vault_service_account="backup_vault_service_account_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupplan.BackupPlan.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_backup_plan(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, backupplan.BackupPlan) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == backupplan.BackupPlan.State.CREATING + assert response.resource_type == "resource_type_value" + assert response.etag == "etag_value" + assert response.backup_vault == "backup_vault_value" + assert response.backup_vault_service_account == "backup_vault_service_account_value" + + +def test_get_backup_plan_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_backup_plan in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_backup_plan] = mock_rpc + + request = {} + client.get_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_backup_plan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_backup_plan_rest_required_fields( + request_type=backupplan.GetBackupPlanRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backup_plan._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backup_plan._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupplan.BackupPlan() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupplan.BackupPlan.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_backup_plan(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_backup_plan_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_backup_plan._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_backup_plan_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BackupDRRestInterceptor, "post_get_backup_plan" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_get_backup_plan" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupplan.GetBackupPlanRequest.pb( + 
backupplan.GetBackupPlanRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = backupplan.BackupPlan.to_json( + backupplan.BackupPlan() + ) + + request = backupplan.GetBackupPlanRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backupplan.BackupPlan() + + client.get_backup_plan( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_backup_plan_rest_bad_request( + transport: str = "rest", request_type=backupplan.GetBackupPlanRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backupPlans/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_backup_plan(request) + + +def test_get_backup_plan_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = backupplan.BackupPlan() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/backupPlans/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupplan.BackupPlan.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_backup_plan(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/backupPlans/*}" + % client.transport._host, + args[1], + ) + + +def test_get_backup_plan_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_backup_plan( + backupplan.GetBackupPlanRequest(), + name="name_value", + ) + + +def test_get_backup_plan_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupplan.ListBackupPlansRequest, + dict, + ], +) +def test_list_backup_plans_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupplan.ListBackupPlansResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupplan.ListBackupPlansResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_backup_plans(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListBackupPlansPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_backup_plans_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_backup_plans in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_backup_plans + ] = mock_rpc + + request = {} + client.list_backup_plans(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_backup_plans(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_backup_plans_rest_required_fields( + request_type=backupplan.ListBackupPlansRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_backup_plans._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_backup_plans._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupplan.ListBackupPlansResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupplan.ListBackupPlansResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_backup_plans(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_backup_plans_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_backup_plans._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_backup_plans_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, 
"transcode" + ) as transcode, mock.patch.object( + transports.BackupDRRestInterceptor, "post_list_backup_plans" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_list_backup_plans" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupplan.ListBackupPlansRequest.pb( + backupplan.ListBackupPlansRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = backupplan.ListBackupPlansResponse.to_json( + backupplan.ListBackupPlansResponse() + ) + + request = backupplan.ListBackupPlansRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backupplan.ListBackupPlansResponse() + + client.list_backup_plans( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_backup_plans_rest_bad_request( + transport: str = "rest", request_type=backupplan.ListBackupPlansRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_backup_plans(request) + + +def test_list_backup_plans_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupplan.ListBackupPlansResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupplan.ListBackupPlansResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_backup_plans(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/backupPlans" + % client.transport._host, + args[1], + ) + + +def test_list_backup_plans_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_backup_plans( + backupplan.ListBackupPlansRequest(), + parent="parent_value", + ) + + +def test_list_backup_plans_rest_pager(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + next_page_token="abc", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[], + next_page_token="def", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + ], + next_page_token="ghi", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + backupplan.ListBackupPlansResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + 
return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_backup_plans(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupplan.BackupPlan) for i in results) + + pages = list(client.list_backup_plans(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupplan.DeleteBackupPlanRequest, + dict, + ], +) +def test_delete_backup_plan_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backupPlans/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_backup_plan(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_backup_plan_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_backup_plan in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_backup_plan + ] = mock_rpc + + request = {} + client.delete_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_backup_plan(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_backup_plan_rest_required_fields( + request_type=backupplan.DeleteBackupPlanRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_backup_plan._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_backup_plan._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_backup_plan(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_backup_plan_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_backup_plan._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_backup_plan_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, 
"post_delete_backup_plan" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_delete_backup_plan" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = backupplan.DeleteBackupPlanRequest.pb( + backupplan.DeleteBackupPlanRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = backupplan.DeleteBackupPlanRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_backup_plan( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_backup_plan_rest_bad_request( + transport: str = "rest", request_type=backupplan.DeleteBackupPlanRequest +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backupPlans/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_backup_plan(request) + + +def test_delete_backup_plan_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/backupPlans/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_backup_plan(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/backupPlans/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_backup_plan_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_backup_plan( + backupplan.DeleteBackupPlanRequest(), + name="name_value", + ) + + +def test_delete_backup_plan_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupplanassociation.CreateBackupPlanAssociationRequest, + dict, + ], +) +def test_create_backup_plan_association_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["backup_plan_association"] = { + "name": "name_value", + "resource_type": "resource_type_value", + "resource": "resource_value", + "backup_plan": "backup_plan_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "state": 1, + "rules_config_info": [ + { + "rule_id": "rule_id_value", + "last_backup_state": 1, + "last_backup_error": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + }, + "last_successful_backup_consistency_time": {}, + } + ], + "data_source": "data_source_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = backupplanassociation.CreateBackupPlanAssociationRequest.meta.fields[ + "backup_plan_association" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "backup_plan_association" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup_plan_association"][field])): + del 
request_init["backup_plan_association"][field][i][subfield] + else: + del request_init["backup_plan_association"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_backup_plan_association(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_backup_plan_association_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_backup_plan_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_backup_plan_association + ] = mock_rpc + + request = {} + client.create_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_backup_plan_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_backup_plan_association_rest_required_fields( + request_type=backupplanassociation.CreateBackupPlanAssociationRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["backup_plan_association_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "backupPlanAssociationId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_backup_plan_association._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "backupPlanAssociationId" in jsonified_request + assert ( + jsonified_request["backupPlanAssociationId"] + == request_init["backup_plan_association_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["backupPlanAssociationId"] = "backup_plan_association_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_backup_plan_association._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "backup_plan_association_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "backupPlanAssociationId" in jsonified_request + assert ( + jsonified_request["backupPlanAssociationId"] + == "backup_plan_association_id_value" + ) + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_backup_plan_association(request) + + expected_params = [ + ( + "backupPlanAssociationId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_backup_plan_association_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_backup_plan_association._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set( + ( + "backupPlanAssociationId", + "requestId", + ) + ) + & set( + ( + "parent", + "backupPlanAssociationId", + "backupPlanAssociation", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_backup_plan_association_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_create_backup_plan_association" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_create_backup_plan_association" + ) as pre: 
+ pre.assert_not_called() + post.assert_not_called() + pb_message = backupplanassociation.CreateBackupPlanAssociationRequest.pb( + backupplanassociation.CreateBackupPlanAssociationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = backupplanassociation.CreateBackupPlanAssociationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_backup_plan_association( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_backup_plan_association_rest_bad_request( + transport: str = "rest", + request_type=backupplanassociation.CreateBackupPlanAssociationRequest, +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_backup_plan_association(request) + + +def test_create_backup_plan_association_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + backup_plan_association=backupplanassociation.BackupPlanAssociation( + name="name_value" + ), + backup_plan_association_id="backup_plan_association_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_backup_plan_association(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/backupPlanAssociations" + % client.transport._host, + args[1], + ) + + +def test_create_backup_plan_association_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_backup_plan_association( + backupplanassociation.CreateBackupPlanAssociationRequest(), + parent="parent_value", + backup_plan_association=backupplanassociation.BackupPlanAssociation( + name="name_value" + ), + backup_plan_association_id="backup_plan_association_id_value", + ) + + +def test_create_backup_plan_association_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupplanassociation.GetBackupPlanAssociationRequest, + dict, + ], +) +def test_get_backup_plan_association_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = backupplanassociation.BackupPlanAssociation( + name="name_value", + resource_type="resource_type_value", + resource="resource_value", + backup_plan="backup_plan_value", + state=backupplanassociation.BackupPlanAssociation.State.CREATING, + data_source="data_source_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupplanassociation.BackupPlanAssociation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_backup_plan_association(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, backupplanassociation.BackupPlanAssociation) + assert response.name == "name_value" + assert response.resource_type == "resource_type_value" + assert response.resource == "resource_value" + assert response.backup_plan == "backup_plan_value" + assert response.state == backupplanassociation.BackupPlanAssociation.State.CREATING + assert response.data_source == "data_source_value" + + +def test_get_backup_plan_association_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_backup_plan_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute 
client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_backup_plan_association + ] = mock_rpc + + request = {} + client.get_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_backup_plan_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_backup_plan_association_rest_required_fields( + request_type=backupplanassociation.GetBackupPlanAssociationRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backup_plan_association._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backup_plan_association._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupplanassociation.BackupPlanAssociation() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupplanassociation.BackupPlanAssociation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_backup_plan_association(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_backup_plan_association_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_backup_plan_association._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_backup_plan_association_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), ) client = BackupDRClient(transport=transport) @@ -3105,14 +21997,14 @@ def test_list_management_servers_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" 
) as transcode, mock.patch.object( - transports.BackupDRRestInterceptor, "post_list_management_servers" + transports.BackupDRRestInterceptor, "post_get_backup_plan_association" ) as post, mock.patch.object( - transports.BackupDRRestInterceptor, "pre_list_management_servers" + transports.BackupDRRestInterceptor, "pre_get_backup_plan_association" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = backupdr.ListManagementServersRequest.pb( - backupdr.ListManagementServersRequest() + pb_message = backupplanassociation.GetBackupPlanAssociationRequest.pb( + backupplanassociation.GetBackupPlanAssociationRequest() ) transcode.return_value = { "method": "post", @@ -3124,19 +22016,19 @@ def test_list_management_servers_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = backupdr.ListManagementServersResponse.to_json( - backupdr.ListManagementServersResponse() + req.return_value._content = backupplanassociation.BackupPlanAssociation.to_json( + backupplanassociation.BackupPlanAssociation() ) - request = backupdr.ListManagementServersRequest() + request = backupplanassociation.GetBackupPlanAssociationRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = backupdr.ListManagementServersResponse() + post.return_value = backupplanassociation.BackupPlanAssociation() - client.list_management_servers( + client.get_backup_plan_association( request, metadata=[ ("key", "val"), @@ -3148,8 +22040,9 @@ def test_list_management_servers_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_management_servers_rest_bad_request( - transport: str = "rest", request_type=backupdr.ListManagementServersRequest +def test_get_backup_plan_association_rest_bad_request( + transport: str = "rest", + request_type=backupplanassociation.GetBackupPlanAssociationRequest, ): 
client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3157,7 +22050,9 @@ def test_list_management_servers_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3169,10 +22064,10 @@ def test_list_management_servers_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_management_servers(request) + client.get_backup_plan_association(request) -def test_list_management_servers_rest_flattened(): +def test_get_backup_plan_association_rest_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3181,14 +22076,16 @@ def test_list_management_servers_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = backupdr.ListManagementServersResponse() + return_value = backupplanassociation.BackupPlanAssociation() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) @@ -3196,159 +22093,90 @@ def test_list_management_servers_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = backupdr.ListManagementServersResponse.pb(return_value) + return_value = backupplanassociation.BackupPlanAssociation.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_management_servers(**mock_args) + client.get_backup_plan_association(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/managementServers" + "%s/v1/{name=projects/*/locations/*/backupPlanAssociations/*}" % client.transport._host, args[1], ) -def test_list_management_servers_rest_flattened_error(transport: str = "rest"): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_management_servers( - backupdr.ListManagementServersRequest(), - parent="parent_value", - ) - - -def test_list_management_servers_rest_pager(transport: str = "rest"): +def test_get_backup_plan_association_rest_flattened_error(transport: str = "rest"): client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - backupdr.ListManagementServersResponse( - management_servers=[ - backupdr.ManagementServer(), - backupdr.ManagementServer(), - backupdr.ManagementServer(), - ], - next_page_token="abc", - ), - backupdr.ListManagementServersResponse( - management_servers=[], - next_page_token="def", - ), - backupdr.ListManagementServersResponse( - management_servers=[ - backupdr.ManagementServer(), - ], - next_page_token="ghi", - ), - backupdr.ListManagementServersResponse( - management_servers=[ - backupdr.ManagementServer(), - backupdr.ManagementServer(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - backupdr.ListManagementServersResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - pager = client.list_management_servers(request=sample_request) + # Attempting to call a method with both a 
request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_backup_plan_association( + backupplanassociation.GetBackupPlanAssociationRequest(), + name="name_value", + ) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, backupdr.ManagementServer) for i in results) - pages = list(client.list_management_servers(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token +def test_get_backup_plan_association_rest_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( "request_type", [ - backupdr.GetManagementServerRequest, + backupplanassociation.ListBackupPlanAssociationsRequest, dict, ], ) -def test_get_management_server_rest(request_type): +def test_list_backup_plan_associations_rest(request_type): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/managementServers/sample3" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = backupdr.ManagementServer( - name="name_value", - description="description_value", - type_=backupdr.ManagementServer.InstanceType.BACKUP_RESTORE, - state=backupdr.ManagementServer.InstanceState.CREATING, - etag="etag_value", - oauth2_client_id="oauth2_client_id_value", - ba_proxy_uri=["ba_proxy_uri_value"], - satisfies_pzi=True, + return_value = backupplanassociation.ListBackupPlanAssociationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = backupdr.ManagementServer.pb(return_value) + return_value = backupplanassociation.ListBackupPlanAssociationsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_management_server(request) + response = client.list_backup_plan_associations(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, backupdr.ManagementServer) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.type_ == backupdr.ManagementServer.InstanceType.BACKUP_RESTORE - assert response.state == backupdr.ManagementServer.InstanceState.CREATING - assert response.etag == "etag_value" - assert response.oauth2_client_id == "oauth2_client_id_value" - assert response.ba_proxy_uri == ["ba_proxy_uri_value"] - assert response.satisfies_pzi is True + assert isinstance(response, pagers.ListBackupPlanAssociationsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_get_management_server_rest_use_cached_wrapped_rpc(): +def test_list_backup_plan_associations_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3363,7 +22191,7 @@ def test_get_management_server_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_management_server + client._transport.list_backup_plan_associations in client._transport._wrapped_methods ) @@ -3373,29 +22201,29 @@ def test_get_management_server_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_management_server + client._transport.list_backup_plan_associations ] = mock_rpc request = {} - client.get_management_server(request) + client.list_backup_plan_associations(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_management_server(request) + client.list_backup_plan_associations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_management_server_rest_required_fields( - request_type=backupdr.GetManagementServerRequest, +def test_list_backup_plan_associations_rest_required_fields( + request_type=backupplanassociation.ListBackupPlanAssociationsRequest, ): transport_class = transports.BackupDRRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -3406,21 +22234,29 @@ def test_get_management_server_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_management_server._get_unset_required_fields(jsonified_request) + ).list_backup_plan_associations._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_management_server._get_unset_required_fields(jsonified_request) + ).list_backup_plan_associations._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3429,7 +22265,7 @@ def test_get_management_server_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = backupdr.ManagementServer() + return_value = backupplanassociation.ListBackupPlanAssociationsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -3450,30 +22286,43 @@ def test_get_management_server_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = backupdr.ManagementServer.pb(return_value) + return_value = backupplanassociation.ListBackupPlanAssociationsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_management_server(request) + response = client.list_backup_plan_associations(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_management_server_rest_unset_required_fields(): +def test_list_backup_plan_associations_rest_unset_required_fields(): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_management_server._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & 
set(("name",))) + unset_fields = transport.list_backup_plan_associations._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_management_server_rest_interceptors(null_interceptor): +def test_list_backup_plan_associations_rest_interceptors(null_interceptor): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), @@ -3484,14 +22333,14 @@ def test_get_management_server_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BackupDRRestInterceptor, "post_get_management_server" + transports.BackupDRRestInterceptor, "post_list_backup_plan_associations" ) as post, mock.patch.object( - transports.BackupDRRestInterceptor, "pre_get_management_server" + transports.BackupDRRestInterceptor, "pre_list_backup_plan_associations" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = backupdr.GetManagementServerRequest.pb( - backupdr.GetManagementServerRequest() + pb_message = backupplanassociation.ListBackupPlanAssociationsRequest.pb( + backupplanassociation.ListBackupPlanAssociationsRequest() ) transcode.return_value = { "method": "post", @@ -3503,19 +22352,21 @@ def test_get_management_server_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = backupdr.ManagementServer.to_json( - backupdr.ManagementServer() + req.return_value._content = ( + backupplanassociation.ListBackupPlanAssociationsResponse.to_json( + backupplanassociation.ListBackupPlanAssociationsResponse() + ) ) - request = backupdr.GetManagementServerRequest() + request = 
backupplanassociation.ListBackupPlanAssociationsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = backupdr.ManagementServer() + post.return_value = backupplanassociation.ListBackupPlanAssociationsResponse() - client.get_management_server( + client.list_backup_plan_associations( request, metadata=[ ("key", "val"), @@ -3527,8 +22378,9 @@ def test_get_management_server_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_management_server_rest_bad_request( - transport: str = "rest", request_type=backupdr.GetManagementServerRequest +def test_list_backup_plan_associations_rest_bad_request( + transport: str = "rest", + request_type=backupplanassociation.ListBackupPlanAssociationsRequest, ): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3536,9 +22388,7 @@ def test_get_management_server_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/managementServers/sample3" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3550,10 +22400,10 @@ def test_get_management_server_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_management_server(request) + client.list_backup_plan_associations(request) -def test_get_management_server_rest_flattened(): +def test_list_backup_plan_associations_rest_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3562,16 +22412,14 @@ def test_get_management_server_rest_flattened(): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = backupdr.ManagementServer() + return_value = backupplanassociation.ListBackupPlanAssociationsResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/managementServers/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -3579,25 +22427,27 @@ def test_get_management_server_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = backupdr.ManagementServer.pb(return_value) + return_value = backupplanassociation.ListBackupPlanAssociationsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_management_server(**mock_args) + client.list_backup_plan_associations(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/managementServers/*}" + "%s/v1/{parent=projects/*/locations/*}/backupPlanAssociations" % client.transport._host, args[1], ) -def test_get_management_server_rest_flattened_error(transport: str = "rest"): +def test_list_backup_plan_associations_rest_flattened_error(transport: str = "rest"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3606,124 +22456,95 @@ def test_get_management_server_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_management_server( - backupdr.GetManagementServerRequest(), - name="name_value", + client.list_backup_plan_associations( + backupplanassociation.ListBackupPlanAssociationsRequest(), + parent="parent_value", ) -def test_get_management_server_rest_error(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - backupdr.CreateManagementServerRequest, - dict, - ], -) -def test_create_management_server_rest(request_type): +def test_list_backup_plan_associations_rest_pager(transport: str = "rest"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["management_server"] = { - "name": "name_value", - "description": "description_value", - "labels": {}, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "type_": 1, - "management_uri": {"web_ui": "web_ui_value", "api": "api_value"}, - "workforce_identity_based_management_uri": { - "first_party_management_uri": "first_party_management_uri_value", - 
"third_party_management_uri": "third_party_management_uri_value", - }, - "state": 1, - "networks": [{"network": "network_value", "peering_mode": 1}], - "etag": "etag_value", - "oauth2_client_id": "oauth2_client_id_value", - "workforce_identity_based_oauth2_client_id": { - "first_party_oauth2_client_id": "first_party_oauth2_client_id_value", - "third_party_oauth2_client_id": "third_party_oauth2_client_id_value", - }, - "ba_proxy_uri": ["ba_proxy_uri_value1", "ba_proxy_uri_value2"], - "satisfies_pzs": {"value": True}, - "satisfies_pzi": True, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = backupdr.CreateManagementServerRequest.meta.fields["management_server"] + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="abc", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[], + next_page_token="def", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="ghi", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + ), + ) + # Two responses for two calls + response = response + response - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] + # Wrap the values into proper Response objs + response = tuple( + backupplanassociation.ListBackupPlanAssociationsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + sample_request = {"parent": "projects/sample1/locations/sample2"} - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + pager = client.list_backup_plan_associations(request=sample_request) - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, backupplanassociation.BackupPlanAssociation) for i in results + ) - subfields_not_in_runtime = [] + pages = list(client.list_backup_plan_associations(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["management_server"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, 
dict): - result = value - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) +@pytest.mark.parametrize( + "request_type", + [ + backupplanassociation.DeleteBackupPlanAssociationRequest, + dict, + ], +) +def test_delete_backup_plan_association_rest(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["management_server"][field])): - del request_init["management_server"][field][i][subfield] - else: - del request_init["management_server"][field][subfield] + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -3738,13 +22559,13 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_management_server(request) + response = client.delete_backup_plan_association(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_create_management_server_rest_use_cached_wrapped_rpc(): +def test_delete_backup_plan_association_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3759,7 +22580,7 @@ def test_create_management_server_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_management_server + client._transport.delete_backup_plan_association in client._transport._wrapped_methods ) @@ -3769,11 +22590,11 @@ def test_create_management_server_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_management_server + client._transport.delete_backup_plan_association ] = mock_rpc request = {} - client.create_management_server(request) + client.delete_backup_plan_association(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -3782,21 +22603,20 @@ def test_create_management_server_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_management_server(request) + client.delete_backup_plan_association(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_management_server_rest_required_fields( - request_type=backupdr.CreateManagementServerRequest, +def test_delete_backup_plan_association_rest_required_fields( + request_type=backupplanassociation.DeleteBackupPlanAssociationRequest, ): transport_class = transports.BackupDRRestTransport request_init = {} - request_init["parent"] = "" - request_init["management_server_id"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -3804,39 +22624,26 @@ def test_create_management_server_rest_required_fields( ) # verify fields with default values are dropped - assert "managementServerId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_management_server._get_unset_required_fields(jsonified_request) + ).delete_backup_plan_association._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "managementServerId" in jsonified_request - assert ( - jsonified_request["managementServerId"] == request_init["management_server_id"] - ) - jsonified_request["parent"] = "parent_value" - jsonified_request["managementServerId"] = "management_server_id_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_management_server._get_unset_required_fields(jsonified_request) + 
).delete_backup_plan_association._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "management_server_id", - "request_id", - ) - ) + assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "managementServerId" in jsonified_request - assert jsonified_request["managementServerId"] == "management_server_id_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3857,10 +22664,9 @@ def test_create_management_server_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -3870,44 +22676,26 @@ def test_create_management_server_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_management_server(request) + response = client.delete_backup_plan_association(request) - expected_params = [ - ( - "managementServerId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_management_server_rest_unset_required_fields(): +def test_delete_backup_plan_association_rest_unset_required_fields(): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_management_server._get_unset_required_fields({}) - assert 
set(unset_fields) == ( - set( - ( - "managementServerId", - "requestId", - ) - ) - & set( - ( - "parent", - "managementServerId", - "managementServer", - ) - ) + unset_fields = transport.delete_backup_plan_association._get_unset_required_fields( + {} ) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_management_server_rest_interceptors(null_interceptor): +def test_delete_backup_plan_association_rest_interceptors(null_interceptor): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), @@ -3920,14 +22708,14 @@ def test_create_management_server_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.BackupDRRestInterceptor, "post_create_management_server" + transports.BackupDRRestInterceptor, "post_delete_backup_plan_association" ) as post, mock.patch.object( - transports.BackupDRRestInterceptor, "pre_create_management_server" + transports.BackupDRRestInterceptor, "pre_delete_backup_plan_association" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = backupdr.CreateManagementServerRequest.pb( - backupdr.CreateManagementServerRequest() + pb_message = backupplanassociation.DeleteBackupPlanAssociationRequest.pb( + backupplanassociation.DeleteBackupPlanAssociationRequest() ) transcode.return_value = { "method": "post", @@ -3943,7 +22731,7 @@ def test_create_management_server_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = backupdr.CreateManagementServerRequest() + request = backupplanassociation.DeleteBackupPlanAssociationRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -3951,7 +22739,7 @@ def test_create_management_server_rest_interceptors(null_interceptor): pre.return_value = request, 
metadata post.return_value = operations_pb2.Operation() - client.create_management_server( + client.delete_backup_plan_association( request, metadata=[ ("key", "val"), @@ -3963,8 +22751,9 @@ def test_create_management_server_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_management_server_rest_bad_request( - transport: str = "rest", request_type=backupdr.CreateManagementServerRequest +def test_delete_backup_plan_association_rest_bad_request( + transport: str = "rest", + request_type=backupplanassociation.DeleteBackupPlanAssociationRequest, ): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3972,7 +22761,9 @@ def test_create_management_server_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -3984,10 +22775,10 @@ def test_create_management_server_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_management_server(request) + client.delete_backup_plan_association(request) -def test_create_management_server_rest_flattened(): +def test_delete_backup_plan_association_rest_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3999,13 +22790,13 @@ def test_create_management_server_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - management_server=backupdr.ManagementServer(name="name_value"), - management_server_id="management_server_id_value", + name="name_value", ) mock_args.update(sample_request) @@ -4016,20 +22807,20 @@ def test_create_management_server_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_management_server(**mock_args) + client.delete_backup_plan_association(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/managementServers" + "%s/v1/{name=projects/*/locations/*/backupPlanAssociations/*}" % client.transport._host, args[1], ) -def test_create_management_server_rest_flattened_error(transport: str = "rest"): +def test_delete_backup_plan_association_rest_flattened_error(transport: str = "rest"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4038,15 +22829,13 @@ def test_create_management_server_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_management_server( - backupdr.CreateManagementServerRequest(), - parent="parent_value", - management_server=backupdr.ManagementServer(name="name_value"), - management_server_id="management_server_id_value", + client.delete_backup_plan_association( + backupplanassociation.DeleteBackupPlanAssociationRequest(), + name="name_value", ) -def test_create_management_server_rest_error(): +def test_delete_backup_plan_association_rest_error(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -4055,11 +22844,11 @@ def test_create_management_server_rest_error(): @pytest.mark.parametrize( "request_type", [ - backupdr.DeleteManagementServerRequest, + backupplanassociation.TriggerBackupRequest, dict, ], ) -def test_delete_management_server_rest(request_type): +def test_trigger_backup_rest(request_type): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4067,7 +22856,7 @@ def test_delete_management_server_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/managementServers/sample3" + "name": 
"projects/sample1/locations/sample2/backupPlanAssociations/sample3" } request = request_type(**request_init) @@ -4083,13 +22872,13 @@ def test_delete_management_server_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_management_server(request) + response = client.trigger_backup(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" -def test_delete_management_server_rest_use_cached_wrapped_rpc(): +def test_trigger_backup_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4103,22 +22892,17 @@ def test_delete_management_server_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.delete_management_server - in client._transport._wrapped_methods - ) + assert client._transport.trigger_backup in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_management_server - ] = mock_rpc + client._transport._wrapped_methods[client._transport.trigger_backup] = mock_rpc request = {} - client.delete_management_server(request) + client.trigger_backup(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -4127,20 +22911,21 @@ def test_delete_management_server_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_management_server(request) + client.trigger_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_management_server_rest_required_fields( - request_type=backupdr.DeleteManagementServerRequest, +def test_trigger_backup_rest_required_fields( + request_type=backupplanassociation.TriggerBackupRequest, ): transport_class = transports.BackupDRRestTransport request_init = {} request_init["name"] = "" + request_init["rule_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -4151,23 +22936,24 @@ def test_delete_management_server_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_management_server._get_unset_required_fields(jsonified_request) + ).trigger_backup._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = "name_value" + jsonified_request["ruleId"] = "rule_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_management_server._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("request_id",)) + ).trigger_backup._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request assert jsonified_request["name"] == "name_value" + assert "ruleId" in jsonified_request + assert jsonified_request["ruleId"] == "rule_id_value" client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4188,9 +22974,10 @@ def test_delete_management_server_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -4200,24 +22987,32 @@ def test_delete_management_server_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_management_server(request) + response = client.trigger_backup(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_management_server_rest_unset_required_fields(): +def test_trigger_backup_rest_unset_required_fields(): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_management_server._get_unset_required_fields({}) - assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + unset_fields = transport.trigger_backup._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "ruleId", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_management_server_rest_interceptors(null_interceptor): +def test_trigger_backup_rest_interceptors(null_interceptor): transport = 
transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), @@ -4230,14 +23025,14 @@ def test_delete_management_server_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.BackupDRRestInterceptor, "post_delete_management_server" + transports.BackupDRRestInterceptor, "post_trigger_backup" ) as post, mock.patch.object( - transports.BackupDRRestInterceptor, "pre_delete_management_server" + transports.BackupDRRestInterceptor, "pre_trigger_backup" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = backupdr.DeleteManagementServerRequest.pb( - backupdr.DeleteManagementServerRequest() + pb_message = backupplanassociation.TriggerBackupRequest.pb( + backupplanassociation.TriggerBackupRequest() ) transcode.return_value = { "method": "post", @@ -4253,7 +23048,7 @@ def test_delete_management_server_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = backupdr.DeleteManagementServerRequest() + request = backupplanassociation.TriggerBackupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -4261,7 +23056,7 @@ def test_delete_management_server_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.delete_management_server( + client.trigger_backup( request, metadata=[ ("key", "val"), @@ -4273,8 +23068,8 @@ def test_delete_management_server_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_management_server_rest_bad_request( - transport: str = "rest", request_type=backupdr.DeleteManagementServerRequest +def test_trigger_backup_rest_bad_request( + transport: str = "rest", request_type=backupplanassociation.TriggerBackupRequest ): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), @@ 
-4283,7 +23078,7 @@ def test_delete_management_server_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/managementServers/sample3" + "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" } request = request_type(**request_init) @@ -4296,10 +23091,10 @@ def test_delete_management_server_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_management_server(request) + client.trigger_backup(request) -def test_delete_management_server_rest_flattened(): +def test_trigger_backup_rest_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4312,12 +23107,13 @@ def test_delete_management_server_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/managementServers/sample3" + "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" } # get truthy value for each flattened field mock_args = dict( name="name_value", + rule_id="rule_id_value", ) mock_args.update(sample_request) @@ -4328,20 +23124,20 @@ def test_delete_management_server_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.delete_management_server(**mock_args) + client.trigger_backup(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/managementServers/*}" + "%s/v1/{name=projects/*/locations/*/backupPlanAssociations/*}:triggerBackup" % client.transport._host, args[1], ) -def test_delete_management_server_rest_flattened_error(transport: str = "rest"): +def test_trigger_backup_rest_flattened_error(transport: str = "rest"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4350,13 +23146,14 @@ def test_delete_management_server_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_management_server( - backupdr.DeleteManagementServerRequest(), + client.trigger_backup( + backupplanassociation.TriggerBackupRequest(), name="name_value", + rule_id="rule_id_value", ) -def test_delete_management_server_rest_error(): +def test_trigger_backup_rest_error(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -4505,6 +23302,29 @@ def test_backup_dr_base_transport(): "get_management_server", "create_management_server", "delete_management_server", + "create_backup_vault", + "list_backup_vaults", + "fetch_usable_backup_vaults", + "get_backup_vault", + "update_backup_vault", + "delete_backup_vault", + "list_data_sources", + "get_data_source", + "update_data_source", + "list_backups", + "get_backup", + "update_backup", + "delete_backup", + "restore_backup", + "create_backup_plan", + "get_backup_plan", + "list_backup_plans", + "delete_backup_plan", + "create_backup_plan_association", + "get_backup_plan_association", + "list_backup_plan_associations", + "delete_backup_plan_association", + "trigger_backup", "set_iam_policy", "get_iam_policy", "test_iam_permissions", @@ -4803,6 +23623,75 @@ def test_backup_dr_client_transport_session_collision(transport_name): 
session1 = client1.transport.delete_management_server._session session2 = client2.transport.delete_management_server._session assert session1 != session2 + session1 = client1.transport.create_backup_vault._session + session2 = client2.transport.create_backup_vault._session + assert session1 != session2 + session1 = client1.transport.list_backup_vaults._session + session2 = client2.transport.list_backup_vaults._session + assert session1 != session2 + session1 = client1.transport.fetch_usable_backup_vaults._session + session2 = client2.transport.fetch_usable_backup_vaults._session + assert session1 != session2 + session1 = client1.transport.get_backup_vault._session + session2 = client2.transport.get_backup_vault._session + assert session1 != session2 + session1 = client1.transport.update_backup_vault._session + session2 = client2.transport.update_backup_vault._session + assert session1 != session2 + session1 = client1.transport.delete_backup_vault._session + session2 = client2.transport.delete_backup_vault._session + assert session1 != session2 + session1 = client1.transport.list_data_sources._session + session2 = client2.transport.list_data_sources._session + assert session1 != session2 + session1 = client1.transport.get_data_source._session + session2 = client2.transport.get_data_source._session + assert session1 != session2 + session1 = client1.transport.update_data_source._session + session2 = client2.transport.update_data_source._session + assert session1 != session2 + session1 = client1.transport.list_backups._session + session2 = client2.transport.list_backups._session + assert session1 != session2 + session1 = client1.transport.get_backup._session + session2 = client2.transport.get_backup._session + assert session1 != session2 + session1 = client1.transport.update_backup._session + session2 = client2.transport.update_backup._session + assert session1 != session2 + session1 = client1.transport.delete_backup._session + session2 = 
client2.transport.delete_backup._session + assert session1 != session2 + session1 = client1.transport.restore_backup._session + session2 = client2.transport.restore_backup._session + assert session1 != session2 + session1 = client1.transport.create_backup_plan._session + session2 = client2.transport.create_backup_plan._session + assert session1 != session2 + session1 = client1.transport.get_backup_plan._session + session2 = client2.transport.get_backup_plan._session + assert session1 != session2 + session1 = client1.transport.list_backup_plans._session + session2 = client2.transport.list_backup_plans._session + assert session1 != session2 + session1 = client1.transport.delete_backup_plan._session + session2 = client2.transport.delete_backup_plan._session + assert session1 != session2 + session1 = client1.transport.create_backup_plan_association._session + session2 = client2.transport.create_backup_plan_association._session + assert session1 != session2 + session1 = client1.transport.get_backup_plan_association._session + session2 = client2.transport.get_backup_plan_association._session + assert session1 != session2 + session1 = client1.transport.list_backup_plan_associations._session + session2 = client2.transport.list_backup_plan_associations._session + assert session1 != session2 + session1 = client1.transport.delete_backup_plan_association._session + session2 = client2.transport.delete_backup_plan_association._session + assert session1 != session2 + session1 = client1.transport.trigger_backup._session + session2 = client2.transport.trigger_backup._session + assert session1 != session2 def test_backup_dr_grpc_transport_channel(): @@ -4957,6 +23846,153 @@ def test_backup_dr_grpc_lro_async_client(): assert transport.operations_client is transport.operations_client +def test_backup_path(): + project = "squid" + location = "clam" + backupvault = "whelk" + datasource = "octopus" + backup = "oyster" + expected = 
"projects/{project}/locations/{location}/backupVaults/{backupvault}/dataSources/{datasource}/backups/{backup}".format( + project=project, + location=location, + backupvault=backupvault, + datasource=datasource, + backup=backup, + ) + actual = BackupDRClient.backup_path( + project, location, backupvault, datasource, backup + ) + assert expected == actual + + +def test_parse_backup_path(): + expected = { + "project": "nudibranch", + "location": "cuttlefish", + "backupvault": "mussel", + "datasource": "winkle", + "backup": "nautilus", + } + path = BackupDRClient.backup_path(**expected) + + # Check that the path construction is reversible. + actual = BackupDRClient.parse_backup_path(path) + assert expected == actual + + +def test_backup_plan_path(): + project = "scallop" + location = "abalone" + backup_plan = "squid" + expected = ( + "projects/{project}/locations/{location}/backupPlans/{backup_plan}".format( + project=project, + location=location, + backup_plan=backup_plan, + ) + ) + actual = BackupDRClient.backup_plan_path(project, location, backup_plan) + assert expected == actual + + +def test_parse_backup_plan_path(): + expected = { + "project": "clam", + "location": "whelk", + "backup_plan": "octopus", + } + path = BackupDRClient.backup_plan_path(**expected) + + # Check that the path construction is reversible. 
+ actual = BackupDRClient.parse_backup_plan_path(path) + assert expected == actual + + +def test_backup_plan_association_path(): + project = "oyster" + location = "nudibranch" + backup_plan_association = "cuttlefish" + expected = "projects/{project}/locations/{location}/backupPlanAssociations/{backup_plan_association}".format( + project=project, + location=location, + backup_plan_association=backup_plan_association, + ) + actual = BackupDRClient.backup_plan_association_path( + project, location, backup_plan_association + ) + assert expected == actual + + +def test_parse_backup_plan_association_path(): + expected = { + "project": "mussel", + "location": "winkle", + "backup_plan_association": "nautilus", + } + path = BackupDRClient.backup_plan_association_path(**expected) + + # Check that the path construction is reversible. + actual = BackupDRClient.parse_backup_plan_association_path(path) + assert expected == actual + + +def test_backup_vault_path(): + project = "scallop" + location = "abalone" + backupvault = "squid" + expected = ( + "projects/{project}/locations/{location}/backupVaults/{backupvault}".format( + project=project, + location=location, + backupvault=backupvault, + ) + ) + actual = BackupDRClient.backup_vault_path(project, location, backupvault) + assert expected == actual + + +def test_parse_backup_vault_path(): + expected = { + "project": "clam", + "location": "whelk", + "backupvault": "octopus", + } + path = BackupDRClient.backup_vault_path(**expected) + + # Check that the path construction is reversible. 
+ actual = BackupDRClient.parse_backup_vault_path(path) + assert expected == actual + + +def test_data_source_path(): + project = "oyster" + location = "nudibranch" + backupvault = "cuttlefish" + datasource = "mussel" + expected = "projects/{project}/locations/{location}/backupVaults/{backupvault}/dataSources/{datasource}".format( + project=project, + location=location, + backupvault=backupvault, + datasource=datasource, + ) + actual = BackupDRClient.data_source_path(project, location, backupvault, datasource) + assert expected == actual + + +def test_parse_data_source_path(): + expected = { + "project": "winkle", + "location": "nautilus", + "backupvault": "scallop", + "datasource": "abalone", + } + path = BackupDRClient.data_source_path(**expected) + + # Check that the path construction is reversible. + actual = BackupDRClient.parse_data_source_path(path) + assert expected == actual + + def test_management_server_path(): project = "squid" location = "clam" diff --git a/packages/google-cloud-batch/CHANGELOG.md b/packages/google-cloud-batch/CHANGELOG.md index addee943a3ea..b923d8c36e1e 100644 --- a/packages/google-cloud-batch/CHANGELOG.md +++ b/packages/google-cloud-batch/CHANGELOG.md @@ -1,5 +1,19 @@ # Changelog +## [0.17.29](https://github.com/googleapis/google-cloud-python/compare/google-cloud-batch-v0.17.28...google-cloud-batch-v0.17.29) (2024-10-08) + + +### Documentation + +* Clarify Batch only supports global custom instance template now ([023d099](https://github.com/googleapis/google-cloud-python/commit/023d09955a2b4e013a3506d2dbed45c3e7e4a696)) + +## [0.17.28](https://github.com/googleapis/google-cloud-python/compare/google-cloud-batch-v0.17.27...google-cloud-batch-v0.17.28) (2024-09-16) + + +### Features + +* [google-cloud-batch] A new value `CANCELLATION_IN_PROGRESS` is added to enum `State` ([#13074](https://github.com/googleapis/google-cloud-python/issues/13074)) 
([76267b2](https://github.com/googleapis/google-cloud-python/commit/76267b2b8998fd2a3602ebf4d12d2aaa30a90cde)) + ## [0.17.27](https://github.com/googleapis/google-cloud-python/compare/google-cloud-batch-v0.17.26...google-cloud-batch-v0.17.27) (2024-09-03) diff --git a/packages/google-cloud-batch/google/cloud/batch/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch/gapic_version.py index 7de8a6a6838d..059e03e3105c 100644 --- a/packages/google-cloud-batch/google/cloud/batch/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.17.27" # {x-release-please-version} +__version__ = "0.17.29" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py index 7de8a6a6838d..059e03e3105c 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.17.27" # {x-release-please-version} +__version__ = "0.17.29" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py b/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py index c11a34f16b56..a5aca6e5d1bc 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py @@ -792,7 +792,11 @@ class InstancePolicyOrTemplate(proto.Message): instance_template (str): Name of an instance template used to create VMs. Named the field as 'instance_template' instead of 'template' to avoid - c++ keyword conflict. + C++ keyword conflict. 
+ + Batch only supports global instance templates. You can + specify the global instance template as a full or partial + URL. This field is a member of `oneof`_ ``policy_template``. install_gpu_drivers (bool): diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py index 7de8a6a6838d..059e03e3105c 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.17.27" # {x-release-please-version} +__version__ = "0.17.29" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/job.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/job.py index 666e309ad00b..744d6bdb9a8b 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/job.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/job.py @@ -332,6 +332,14 @@ class State(proto.Enum): The Job will be deleted, but has not been deleted yet. Typically this is because resources used by the Job are still being cleaned up. + CANCELLATION_IN_PROGRESS (7): + The Job cancellation is in progress, this is + because the resources used by the Job are still + being cleaned up. + CANCELLED (8): + The Job has been cancelled, the task + executions were stopped and the resources were + cleaned up. """ STATE_UNSPECIFIED = 0 QUEUED = 1 @@ -340,6 +348,8 @@ class State(proto.Enum): SUCCEEDED = 4 FAILED = 5 DELETION_IN_PROGRESS = 6 + CANCELLATION_IN_PROGRESS = 7 + CANCELLED = 8 class InstanceStatus(proto.Message): r"""VM instance status. 
diff --git a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json index 1a9ad7a0b658..aa5fe556ec83 100644 --- a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json +++ b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-batch", - "version": "0.17.27" + "version": "0.17.29" }, "snippets": [ { diff --git a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json index 9c3638c4d767..479ce3ca3586 100644 --- a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json +++ b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-batch", - "version": "0.17.27" + "version": "0.17.29" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md b/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md index 953e701b7f5e..0df3c54ccc62 100644 --- a/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md +++ b/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-bigquery-datatransfer/#history +## [3.16.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-datatransfer-v3.15.7...google-cloud-bigquery-datatransfer-v3.16.0) (2024-10-08) + + +### Features + +* Add scheduleOptionsV2 and Error fields for TransferConfig ([052585c](https://github.com/googleapis/google-cloud-python/commit/052585c63dfa172b7f88fdb5882eda446fc47bfe)) + ## 
[3.15.7](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-datatransfer-v3.15.6...google-cloud-bigquery-datatransfer-v3.15.7) (2024-09-04) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/__init__.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/__init__.py index c2cce6e9d9e7..bb51c87c8254 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/__init__.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/__init__.py @@ -54,7 +54,11 @@ from google.cloud.bigquery_datatransfer_v1.types.transfer import ( EmailPreferences, EncryptionConfiguration, + EventDrivenSchedule, + ManualSchedule, ScheduleOptions, + ScheduleOptionsV2, + TimeBasedSchedule, TransferConfig, TransferMessage, TransferRun, @@ -93,7 +97,11 @@ "UpdateTransferConfigRequest", "EmailPreferences", "EncryptionConfiguration", + "EventDrivenSchedule", + "ManualSchedule", "ScheduleOptions", + "ScheduleOptionsV2", + "TimeBasedSchedule", "TransferConfig", "TransferMessage", "TransferRun", diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py index fc64b41dd679..dd91ea26f1b4 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "3.15.7" # {x-release-please-version} +__version__ = "3.16.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/__init__.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/__init__.py index 41ad09552699..7df301ab3c59 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/__init__.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/__init__.py @@ -52,7 +52,11 @@ from .types.transfer import ( EmailPreferences, EncryptionConfiguration, + EventDrivenSchedule, + ManualSchedule, ScheduleOptions, + ScheduleOptionsV2, + TimeBasedSchedule, TransferConfig, TransferMessage, TransferRun, @@ -74,6 +78,7 @@ "EmailPreferences", "EncryptionConfiguration", "EnrollDataSourcesRequest", + "EventDrivenSchedule", "GetDataSourceRequest", "GetTransferConfigRequest", "GetTransferRunRequest", @@ -85,11 +90,14 @@ "ListTransferLogsResponse", "ListTransferRunsRequest", "ListTransferRunsResponse", + "ManualSchedule", "ScheduleOptions", + "ScheduleOptionsV2", "ScheduleTransferRunsRequest", "ScheduleTransferRunsResponse", "StartManualTransferRunsRequest", "StartManualTransferRunsResponse", + "TimeBasedSchedule", "TransferConfig", "TransferMessage", "TransferRun", diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py index fc64b41dd679..dd91ea26f1b4 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "3.15.7" # {x-release-please-version} +__version__ = "3.16.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/__init__.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/__init__.py index 2caa0e24a50d..f704ac5f758d 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/__init__.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/__init__.py @@ -43,7 +43,11 @@ from .transfer import ( EmailPreferences, EncryptionConfiguration, + EventDrivenSchedule, + ManualSchedule, ScheduleOptions, + ScheduleOptionsV2, + TimeBasedSchedule, TransferConfig, TransferMessage, TransferRun, @@ -80,7 +84,11 @@ "UpdateTransferConfigRequest", "EmailPreferences", "EncryptionConfiguration", + "EventDrivenSchedule", + "ManualSchedule", "ScheduleOptions", + "ScheduleOptionsV2", + "TimeBasedSchedule", "TransferConfig", "TransferMessage", "TransferRun", diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/transfer.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/transfer.py index 4403154949f8..bd37dfdec84b 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/transfer.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/transfer.py @@ -30,6 +30,10 @@ "TransferState", "EmailPreferences", "ScheduleOptions", + "ScheduleOptionsV2", + "TimeBasedSchedule", + "ManualSchedule", + "EventDrivenSchedule", "UserInfo", "TransferConfig", "EncryptionConfiguration", @@ -144,6 +148,130 @@ class ScheduleOptions(proto.Message): ) +class ScheduleOptionsV2(proto.Message): + r"""V2 options customizing different types of data transfer + schedule. 
This field supports existing time-based and manual + transfer schedule. Also supports Event-Driven transfer schedule. + ScheduleOptionsV2 cannot be used together with + ScheduleOptions/Schedule. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + time_based_schedule (google.cloud.bigquery_datatransfer_v1.types.TimeBasedSchedule): + Time based transfer schedule options. This is + the default schedule option. + + This field is a member of `oneof`_ ``schedule``. + manual_schedule (google.cloud.bigquery_datatransfer_v1.types.ManualSchedule): + Manual transfer schedule. If set, the transfer run will not + be auto-scheduled by the system, unless the client invokes + StartManualTransferRuns. This is equivalent to + disable_auto_scheduling = true. + + This field is a member of `oneof`_ ``schedule``. + event_driven_schedule (google.cloud.bigquery_datatransfer_v1.types.EventDrivenSchedule): + Event driven transfer schedule options. If + set, the transfer will be scheduled upon events + arrival. + + This field is a member of `oneof`_ ``schedule``. + """ + + time_based_schedule: "TimeBasedSchedule" = proto.Field( + proto.MESSAGE, + number=1, + oneof="schedule", + message="TimeBasedSchedule", + ) + manual_schedule: "ManualSchedule" = proto.Field( + proto.MESSAGE, + number=2, + oneof="schedule", + message="ManualSchedule", + ) + event_driven_schedule: "EventDrivenSchedule" = proto.Field( + proto.MESSAGE, + number=3, + oneof="schedule", + message="EventDrivenSchedule", + ) + + +class TimeBasedSchedule(proto.Message): + r"""Options customizing the time based transfer schedule. + Options are migrated from the original ScheduleOptions message.
+ + Attributes: + schedule (str): + Data transfer schedule. If the data source does not support + a custom schedule, this should be empty. If it is empty, the + default value for the data source will be used. The + specified times are in UTC. Examples of valid format: + ``1st,3rd monday of month 15:30``, + ``every wed,fri of jan,jun 13:15``, and + ``first sunday of quarter 00:00``. See more explanation + about the format here: + https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format + + NOTE: The minimum interval time between recurring transfers + depends on the data source; refer to the documentation for + your data source. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Specifies time to start scheduling transfer + runs. The first run will be scheduled at or + after the start time according to a recurrence + pattern defined in the schedule string. The + start time can be changed at any moment. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Defines time to stop scheduling transfer + runs. A transfer run cannot be scheduled at or + after the end time. The end time can be changed + at any moment. + """ + + schedule: str = proto.Field( + proto.STRING, + number=1, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + +class ManualSchedule(proto.Message): + r"""Options customizing manual transfers schedule.""" + + +class EventDrivenSchedule(proto.Message): + r"""Options customizing EventDriven transfers schedule. + + Attributes: + pubsub_subscription (str): + Pub/Sub subscription name used to receive + events. Only Google Cloud Storage data source + support this option. 
Format: + projects/{project}/subscriptions/{subscription} + """ + + pubsub_subscription: str = proto.Field( + proto.STRING, + number=1, + ) + + class UserInfo(proto.Message): r"""Information about a user. @@ -222,6 +350,11 @@ class TransferConfig(proto.Message): schedule_options (google.cloud.bigquery_datatransfer_v1.types.ScheduleOptions): Options customizing the data transfer schedule. + schedule_options_v2 (google.cloud.bigquery_datatransfer_v1.types.ScheduleOptionsV2): + Options customizing different types of data transfer + schedule. This field replaces "schedule" and + "schedule_options" fields. ScheduleOptionsV2 cannot be used + together with ScheduleOptions/Schedule. data_refresh_window_days (int): The number of days to look back to automatically refresh the data. For example, if ``data_refresh_window_days = 10``, @@ -274,6 +407,10 @@ class TransferConfig(proto.Message): effect. Write methods will apply the key if it is present, or otherwise try to apply project default keys if it is absent. + error (google.rpc.status_pb2.Status): + Output only. Error code with detailed + information about reason of the latest config + failure. 
""" name: str = proto.Field( @@ -307,6 +444,11 @@ class TransferConfig(proto.Message): number=24, message="ScheduleOptions", ) + schedule_options_v2: "ScheduleOptionsV2" = proto.Field( + proto.MESSAGE, + number=31, + message="ScheduleOptionsV2", + ) data_refresh_window_days: int = proto.Field( proto.INT32, number=12, @@ -358,6 +500,11 @@ class TransferConfig(proto.Message): number=28, message="EncryptionConfiguration", ) + error: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=32, + message=status_pb2.Status, + ) class EncryptionConfiguration(proto.Message): diff --git a/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json b/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json index da58d7e46817..4c0bdf899c2a 100644 --- a/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json +++ b/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-datatransfer", - "version": "3.15.7" + "version": "3.16.0" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py b/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py index 527a6e81160e..f144355cd636 100644 --- a/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py +++ b/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py @@ -35,6 +35,7 @@ from google.auth.exceptions import MutualTLSChannelError from google.cloud.location import locations_pb2 from google.oauth2 import 
service_account +from google.protobuf import any_pb2 # type: ignore from google.protobuf import duration_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import json_format @@ -8880,6 +8881,17 @@ def test_create_transfer_config_rest(request_type): "start_time": {"seconds": 751, "nanos": 543}, "end_time": {}, }, + "schedule_options_v2": { + "time_based_schedule": { + "schedule": "schedule_value", + "start_time": {}, + "end_time": {}, + }, + "manual_schedule": {}, + "event_driven_schedule": { + "pubsub_subscription": "pubsub_subscription_value" + }, + }, "data_refresh_window_days": 2543, "disabled": True, "update_time": {}, @@ -8891,6 +8903,16 @@ def test_create_transfer_config_rest(request_type): "email_preferences": {"enable_failure_email": True}, "owner_info": {"email": "email_value"}, "encryption_configuration": {"kms_key_name": {"value": "value_value"}}, + "error": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -9327,6 +9349,17 @@ def test_update_transfer_config_rest(request_type): "start_time": {"seconds": 751, "nanos": 543}, "end_time": {}, }, + "schedule_options_v2": { + "time_based_schedule": { + "schedule": "schedule_value", + "start_time": {}, + "end_time": {}, + }, + "manual_schedule": {}, + "event_driven_schedule": { + "pubsub_subscription": "pubsub_subscription_value" + }, + }, "data_refresh_window_days": 2543, "disabled": True, "update_time": {}, @@ -9338,6 +9371,16 @@ def test_update_transfer_config_rest(request_type): "email_preferences": {"enable_failure_email": True}, "owner_info": {"email": "email_value"}, "encryption_configuration": {"kms_key_name": {"value": "value_value"}}, + "error": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency diff --git a/packages/google-cloud-build/CHANGELOG.md b/packages/google-cloud-build/CHANGELOG.md index 619b570d55f6..fb07ad800441 100644 --- a/packages/google-cloud-build/CHANGELOG.md +++ b/packages/google-cloud-build/CHANGELOG.md @@ -4,6 +4,18 @@ [1]: https://pypi.org/project/google-cloud-build/#history +## [3.25.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-build-v3.24.2...google-cloud-build-v3.25.0) (2024-09-23) + + +### Features + +* Add LEGACY_BUCKET option to DefaultLogsBucketBehavior ([e889809](https://github.com/googleapis/google-cloud-python/commit/e889809389c5b194ec77955664eb2859cde28d73)) + + +### Documentation + +* Sanitize docs ([e889809](https://github.com/googleapis/google-cloud-python/commit/e889809389c5b194ec77955664eb2859cde28d73)) + ## [3.24.2](https://github.com/googleapis/google-cloud-python/compare/google-cloud-build-v3.24.1...google-cloud-build-v3.24.2) (2024-07-30) diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild/gapic_version.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild/gapic_version.py index 558c8aab67c5..8adcea73e25d 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild/gapic_version.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "3.25.0" # {x-release-please-version} diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/gapic_version.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/gapic_version.py index 558c8aab67c5..8adcea73e25d 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/gapic_version.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "3.25.0" # {x-release-please-version} diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py index a10715d0ffdf..995ae202614c 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py @@ -3278,7 +3278,7 @@ class LoggingMode(proto.Enum): NONE = 4 class DefaultLogsBucketBehavior(proto.Enum): - r"""Default GCS log bucket behavior options. + r"""Default Cloud Storage log bucket behavior options. Values: DEFAULT_LOGS_BUCKET_BEHAVIOR_UNSPECIFIED (0): @@ -3287,10 +3287,15 @@ class DefaultLogsBucketBehavior(proto.Enum): Bucket is located in user-owned project in the same region as the build. The builder service account must have access to create and - write to GCS buckets in the build project. + write to Cloud Storage buckets in the build + project. + LEGACY_BUCKET (2): + Bucket is located in a Google-owned project + and is not regionalized. """ DEFAULT_LOGS_BUCKET_BEHAVIOR_UNSPECIFIED = 0 REGIONAL_USER_OWNED_BUCKET = 1 + LEGACY_BUCKET = 2 class PoolOption(proto.Message): r"""Details about how a build should be executed on a ``WorkerPool``. 
diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/gapic_version.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/gapic_version.py index 558c8aab67c5..8adcea73e25d 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/gapic_version.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "3.25.0" # {x-release-please-version} diff --git a/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json b/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json index e379efab560f..66e42a84ba95 100644 --- a/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json +++ b/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-build", - "version": "0.1.0" + "version": "3.25.0" }, "snippets": [ { diff --git a/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json b/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json index 818d3fc2029c..f4891e033575 100644 --- a/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json +++ b/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-build", - "version": "0.1.0" + "version": "3.25.0" }, "snippets": [ { diff --git a/packages/google-cloud-channel/CHANGELOG.md b/packages/google-cloud-channel/CHANGELOG.md index 
da482c90412d..252ea1ffe6f5 100644 --- a/packages/google-cloud-channel/CHANGELOG.md +++ b/packages/google-cloud-channel/CHANGELOG.md @@ -1,5 +1,19 @@ # Changelog +## [1.19.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-channel-v1.18.5...google-cloud-channel-v1.19.0) (2024-10-08) + + +### Features + +* Add support for importing team customer from a different reseller ([c38431b](https://github.com/googleapis/google-cloud-python/commit/c38431b363fd4f18bb692593f401e3ac3759637c)) +* Add support for primary_admin_email as customer_identity for ImportCustomer ([c38431b](https://github.com/googleapis/google-cloud-python/commit/c38431b363fd4f18bb692593f401e3ac3759637c)) +* Add support to look up team customer Cloud Identity information ([c38431b](https://github.com/googleapis/google-cloud-python/commit/c38431b363fd4f18bb692593f401e3ac3759637c)) + + +### Documentation + +* Clarify the expected value of the domain field for team type customers ([c38431b](https://github.com/googleapis/google-cloud-python/commit/c38431b363fd4f18bb692593f401e3ac3759637c)) + ## [1.18.5](https://github.com/googleapis/google-cloud-python/compare/google-cloud-channel-v1.18.4...google-cloud-channel-v1.18.5) (2024-07-30) diff --git a/packages/google-cloud-channel/google/cloud/channel/gapic_version.py b/packages/google-cloud-channel/google/cloud/channel/gapic_version.py index 558c8aab67c5..f1337c609ff8 100644 --- a/packages/google-cloud-channel/google/cloud/channel/gapic_version.py +++ b/packages/google-cloud-channel/google/cloud/channel/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.19.0" # {x-release-please-version} diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/gapic_version.py b/packages/google-cloud-channel/google/cloud/channel_v1/gapic_version.py index 558c8aab67c5..f1337c609ff8 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/gapic_version.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.19.0" # {x-release-please-version} diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/types/service.py b/packages/google-cloud-channel/google/cloud/channel_v1/types/service.py index c8ae1f8d383f..9738f170a601 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/types/service.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/types/service.py @@ -127,7 +127,12 @@ class CheckCloudIdentityAccountsExistRequest(proto.Message): the format: accounts/{account_id} domain (str): Required. Domain to fetch for Cloud Identity - account customer. + account customers, including domain and team + customers. For team customers, please use the + domain for their emails. + primary_admin_email (str): + Optional. Primary admin email to fetch for + Cloud Identity account team customer. """ parent: str = proto.Field( @@ -138,6 +143,10 @@ class CheckCloudIdentityAccountsExistRequest(proto.Message): proto.STRING, number=2, ) + primary_admin_email: str = proto.Field( + proto.STRING, + number=4, + ) class CloudIdentityCustomerAccount(proto.Message): @@ -159,6 +168,11 @@ class CloudIdentityCustomerAccount(proto.Message): customer_cloud_identity_id (str): If existing = true, the Cloud Identity ID of the customer. 
+ customer_type (google.cloud.channel_v1.types.CloudIdentityInfo.CustomerType): + If existing = true, the type of the customer. + channel_partner_cloud_identity_id (str): + If existing = true, and is 2-tier customer, + the channel partner of the customer. """ existing: bool = proto.Field( @@ -177,6 +191,15 @@ class CloudIdentityCustomerAccount(proto.Message): proto.STRING, number=4, ) + customer_type: common.CloudIdentityInfo.CustomerType = proto.Field( + proto.ENUM, + number=5, + enum=common.CloudIdentityInfo.CustomerType, + ) + channel_partner_cloud_identity_id: str = proto.Field( + proto.STRING, + number=6, + ) class CheckCloudIdentityAccountsExistResponse(proto.Message): @@ -373,6 +396,10 @@ class ImportCustomerRequest(proto.Message): cloud_identity_id (str): Required. Customer's Cloud Identity ID + This field is a member of `oneof`_ ``customer_identity``. + primary_admin_email (str): + Required. Customer's primary admin email. + This field is a member of `oneof`_ ``customer_identity``. parent (str): Required. The resource name of the reseller's account. 
@@ -413,6 +440,11 @@ class ImportCustomerRequest(proto.Message): number=3, oneof="customer_identity", ) + primary_admin_email: str = proto.Field( + proto.STRING, + number=8, + oneof="customer_identity", + ) parent: str = proto.Field( proto.STRING, number=1, diff --git a/packages/google-cloud-channel/samples/generated_samples/snippet_metadata_google.cloud.channel.v1.json b/packages/google-cloud-channel/samples/generated_samples/snippet_metadata_google.cloud.channel.v1.json index 864fdea496be..075cb1b96e00 100644 --- a/packages/google-cloud-channel/samples/generated_samples/snippet_metadata_google.cloud.channel.v1.json +++ b/packages/google-cloud-channel/samples/generated_samples/snippet_metadata_google.cloud.channel.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-channel", - "version": "0.1.0" + "version": "1.19.0" }, "snippets": [ { diff --git a/packages/google-cloud-channel/scripts/fixup_channel_v1_keywords.py b/packages/google-cloud-channel/scripts/fixup_channel_v1_keywords.py index 7c3e175a35d5..a7022924a590 100644 --- a/packages/google-cloud-channel/scripts/fixup_channel_v1_keywords.py +++ b/packages/google-cloud-channel/scripts/fixup_channel_v1_keywords.py @@ -44,7 +44,7 @@ class channelCallTransformer(cst.CSTTransformer): 'change_offer': ('name', 'offer', 'parameters', 'purchase_order_id', 'request_id', 'billing_account', ), 'change_parameters': ('name', 'parameters', 'request_id', 'purchase_order_id', ), 'change_renewal_settings': ('name', 'renewal_settings', 'request_id', ), - 'check_cloud_identity_accounts_exist': ('parent', 'domain', ), + 'check_cloud_identity_accounts_exist': ('parent', 'domain', 'primary_admin_email', ), 'create_channel_partner_link': ('parent', 'channel_partner_link', ), 'create_channel_partner_repricing_config': ('parent', 'channel_partner_repricing_config', ), 'create_customer': ('parent', 'customer', ), @@ -59,7 +59,7 @@ class channelCallTransformer(cst.CSTTransformer): 'get_customer': ('name', ), 
'get_customer_repricing_config': ('name', ), 'get_entitlement': ('name', ), - 'import_customer': ('domain', 'cloud_identity_id', 'parent', 'overwrite_if_exists', 'auth_token', 'channel_partner_id', 'customer', ), + 'import_customer': ('domain', 'cloud_identity_id', 'primary_admin_email', 'parent', 'overwrite_if_exists', 'auth_token', 'channel_partner_id', 'customer', ), 'list_channel_partner_links': ('parent', 'page_size', 'page_token', 'view', ), 'list_channel_partner_repricing_configs': ('parent', 'page_size', 'page_token', 'filter', ), 'list_customer_repricing_configs': ('parent', 'page_size', 'page_token', 'filter', ), diff --git a/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_service.py b/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_service.py index f454d78314e5..b6ab34645279 100644 --- a/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_service.py +++ b/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_service.py @@ -2110,6 +2110,7 @@ def test_check_cloud_identity_accounts_exist_non_empty_request_with_auto_populat request = service.CheckCloudIdentityAccountsExistRequest( parent="parent_value", domain="domain_value", + primary_admin_email="primary_admin_email_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2125,6 +2126,7 @@ def test_check_cloud_identity_accounts_exist_non_empty_request_with_auto_populat assert args[0] == service.CheckCloudIdentityAccountsExistRequest( parent="parent_value", domain="domain_value", + primary_admin_email="primary_admin_email_value", ) @@ -3397,6 +3399,7 @@ def test_import_customer_non_empty_request_with_auto_populated_field(): request = service.ImportCustomerRequest( domain="domain_value", cloud_identity_id="cloud_identity_id_value", + primary_admin_email="primary_admin_email_value", parent="parent_value", auth_token="auth_token_value", channel_partner_id="channel_partner_id_value", @@ -3414,6 +3417,7 @@ def test_import_customer_non_empty_request_with_auto_populated_field(): assert args[0] == service.ImportCustomerRequest( domain="domain_value", cloud_identity_id="cloud_identity_id_value", + primary_admin_email="primary_admin_email_value", parent="parent_value", auth_token="auth_token_value", channel_partner_id="channel_partner_id_value", diff --git a/packages/google-cloud-cloudcontrolspartner/CHANGELOG.md b/packages/google-cloud-cloudcontrolspartner/CHANGELOG.md index b2bd23a8caac..275e18ff132a 100644 --- a/packages/google-cloud-cloudcontrolspartner/CHANGELOG.md +++ b/packages/google-cloud-cloudcontrolspartner/CHANGELOG.md @@ -1,5 +1,28 @@ # Changelog +## [0.2.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-cloudcontrolspartner-v0.1.3...google-cloud-cloudcontrolspartner-v0.2.0) (2024-09-16) + + +### ⚠ BREAKING CHANGES + +* [google-cloud-cloudcontrolspartner] Field behavior for field display_name in message .google.cloud.cloudcontrolspartner.v1beta.Customer is changed + +### Features + +* A new value `ACCESS_TRANSPARENCY_LOGS_SUPPORT_CASE_VIEWER` is added to enum `.google.cloud.cloudcontrolspartner.v1beta.PartnerPermissions.Permission` ([c03c441](https://github.com/googleapis/google-cloud-python/commit/c03c4411287ee195fd5c99aff94d812381a908f3)) +* Field behavior for field `customer_onboarding_state` in 
message `.google.cloud.cloudcontrolspartner.v1beta.Customer` is changed ([c03c441](https://github.com/googleapis/google-cloud-python/commit/c03c4411287ee195fd5c99aff94d812381a908f3)) +* Field behavior for field `is_onboarded` in message `.google.cloud.cloudcontrolspartner.v1beta.Customer` is changed ([c03c441](https://github.com/googleapis/google-cloud-python/commit/c03c4411287ee195fd5c99aff94d812381a908f3)) + + +### Bug Fixes + +* [google-cloud-cloudcontrolspartner] Field behavior for field display_name in message .google.cloud.cloudcontrolspartner.v1beta.Customer is changed ([c03c441](https://github.com/googleapis/google-cloud-python/commit/c03c4411287ee195fd5c99aff94d812381a908f3)) + + +### Documentation + +* A comment for field `display_name` in message `.google.cloud.cloudcontrolspartner.v1beta.Customer` is changed ([c03c441](https://github.com/googleapis/google-cloud-python/commit/c03c4411287ee195fd5c99aff94d812381a908f3)) + ## [0.1.3](https://github.com/googleapis/google-cloud-python/compare/google-cloud-cloudcontrolspartner-v0.1.2...google-cloud-cloudcontrolspartner-v0.1.3) (2024-07-30) diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner/gapic_version.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner/gapic_version.py index 558c8aab67c5..364164ddb134 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner/gapic_version.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.2.0" # {x-release-please-version} diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/gapic_version.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/gapic_version.py index 558c8aab67c5..364164ddb134 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/gapic_version.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.2.0" # {x-release-please-version} diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/types/customers.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/types/customers.py index 62eee778fdaf..917b2c256294 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/types/customers.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/types/customers.py @@ -45,13 +45,13 @@ class Customer(proto.Message): Identifier. Format: ``organizations/{organization}/locations/{location}/customers/{customer}`` display_name (str): - The customer organization's display name. - E.g. "google.com". + Required. Display name for the customer customer_onboarding_state (google.cloud.cloudcontrolspartner_v1.types.CustomerOnboardingState): - Container for customer onboarding steps + Output only. Container for customer + onboarding steps is_onboarded (bool): - Indicates whether a customer is fully - onboarded + Output only. 
Indicates whether a customer is + fully onboarded """ name: str = proto.Field( diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/types/partner_permissions.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/types/partner_permissions.py index 072b279e0861..a9c4f2513124 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/types/partner_permissions.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/types/partner_permissions.py @@ -57,12 +57,16 @@ class Permission(proto.Enum): ASSURED_WORKLOADS_EKM_CONNECTION_STATUS (4): Permission for External Key Manager connection status + ACCESS_TRANSPARENCY_LOGS_SUPPORT_CASE_VIEWER (5): + Permission for support case details for + Access Transparency log entries """ PERMISSION_UNSPECIFIED = 0 ACCESS_TRANSPARENCY_AND_EMERGENCY_ACCESS_LOGS = 1 ASSURED_WORKLOADS_MONITORING = 2 ACCESS_APPROVAL_REQUESTS = 3 ASSURED_WORKLOADS_EKM_CONNECTION_STATUS = 4 + ACCESS_TRANSPARENCY_LOGS_SUPPORT_CASE_VIEWER = 5 name: str = proto.Field( proto.STRING, diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/gapic_version.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/gapic_version.py index 558c8aab67c5..364164ddb134 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/gapic_version.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.2.0" # {x-release-please-version} diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/customers.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/customers.py index dae25231d4f0..2237867d884f 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/customers.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/customers.py @@ -45,13 +45,13 @@ class Customer(proto.Message): Identifier. Format: ``organizations/{organization}/locations/{location}/customers/{customer}`` display_name (str): - The customer organization's display name. - E.g. "google.com". + Required. Display name for the customer customer_onboarding_state (google.cloud.cloudcontrolspartner_v1beta.types.CustomerOnboardingState): - Container for customer onboarding steps + Output only. Container for customer + onboarding steps is_onboarded (bool): - Indicates whether a customer is fully - onboarded + Output only. 
Indicates whether a customer is + fully onboarded """ name: str = proto.Field( diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/partner_permissions.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/partner_permissions.py index d94dff633d35..eddc0cf9ab95 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/partner_permissions.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/partner_permissions.py @@ -57,12 +57,16 @@ class Permission(proto.Enum): ASSURED_WORKLOADS_EKM_CONNECTION_STATUS (4): Permission for External Key Manager connection status + ACCESS_TRANSPARENCY_LOGS_SUPPORT_CASE_VIEWER (5): + Permission for support case details for + Access Transparency log entries """ PERMISSION_UNSPECIFIED = 0 ACCESS_TRANSPARENCY_AND_EMERGENCY_ACCESS_LOGS = 1 ASSURED_WORKLOADS_MONITORING = 2 ACCESS_APPROVAL_REQUESTS = 3 ASSURED_WORKLOADS_EKM_CONNECTION_STATUS = 4 + ACCESS_TRANSPARENCY_LOGS_SUPPORT_CASE_VIEWER = 5 name: str = proto.Field( proto.STRING, diff --git a/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1.json b/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1.json index 5a93fc370b33..606c14b81f01 100644 --- a/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1.json +++ b/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-cloudcontrolspartner", - "version": "0.1.0" + "version": "0.2.0" }, "snippets": [ { diff --git 
a/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1beta.json b/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1beta.json index 642805220b5c..9c0039bf1f65 100644 --- a/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1beta.json +++ b/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-cloudcontrolspartner", - "version": "0.1.0" + "version": "0.2.0" }, "snippets": [ { diff --git a/packages/google-cloud-commerce-consumer-procurement/CHANGELOG.md b/packages/google-cloud-commerce-consumer-procurement/CHANGELOG.md index 8e7cbe49e394..8bed09396aee 100644 --- a/packages/google-cloud-commerce-consumer-procurement/CHANGELOG.md +++ b/packages/google-cloud-commerce-consumer-procurement/CHANGELOG.md @@ -1,5 +1,24 @@ # Changelog +## [0.1.8](https://github.com/googleapis/google-cloud-python/compare/google-cloud-commerce-consumer-procurement-v0.1.7...google-cloud-commerce-consumer-procurement-v0.1.8) (2024-10-08) + + +### Features + +* add Order modification RPCs and License Management Service ([852d797](https://github.com/googleapis/google-cloud-python/commit/852d797f21d4809c32d98b384c60bf9852b14216)) + + +### Documentation + +* A comment for enum value `LINE_ITEM_CHANGE_STATE_ABANDONED` in enum `LineItemChangeState` is changed ([852d797](https://github.com/googleapis/google-cloud-python/commit/852d797f21d4809c32d98b384c60bf9852b14216)) +* A comment for enum value `LINE_ITEM_CHANGE_STATE_ACTIVATING` in enum `LineItemChangeState` is changed ([852d797](https://github.com/googleapis/google-cloud-python/commit/852d797f21d4809c32d98b384c60bf9852b14216)) +* A comment for enum value `LINE_ITEM_CHANGE_STATE_APPROVED` in enum 
`LineItemChangeState` is changed ([852d797](https://github.com/googleapis/google-cloud-python/commit/852d797f21d4809c32d98b384c60bf9852b14216)) +* A comment for enum value `LINE_ITEM_CHANGE_STATE_COMPLETED` in enum `LineItemChangeState` is changed ([852d797](https://github.com/googleapis/google-cloud-python/commit/852d797f21d4809c32d98b384c60bf9852b14216)) +* A comment for enum value `LINE_ITEM_CHANGE_STATE_PENDING_APPROVAL` in enum `LineItemChangeState` is changed ([852d797](https://github.com/googleapis/google-cloud-python/commit/852d797f21d4809c32d98b384c60bf9852b14216)) +* A comment for enum value `LINE_ITEM_CHANGE_STATE_REJECTED` in enum `LineItemChangeState` is changed ([852d797](https://github.com/googleapis/google-cloud-python/commit/852d797f21d4809c32d98b384c60bf9852b14216)) +* A comment for field `filter` in message `.google.cloud.commerce.consumer.procurement.v1.ListOrdersRequest` is changed ([852d797](https://github.com/googleapis/google-cloud-python/commit/852d797f21d4809c32d98b384c60bf9852b14216)) +* A comment for field `request_id` in message `.google.cloud.commerce.consumer.procurement.v1.PlaceOrderRequest` is changed ([852d797](https://github.com/googleapis/google-cloud-python/commit/852d797f21d4809c32d98b384c60bf9852b14216)) + ## [0.1.7](https://github.com/googleapis/google-cloud-python/compare/google-cloud-commerce-consumer-procurement-v0.1.6...google-cloud-commerce-consumer-procurement-v0.1.7) (2024-07-30) diff --git a/packages/google-cloud-commerce-consumer-procurement/docs/commerce_consumer_procurement_v1/license_management_service.rst b/packages/google-cloud-commerce-consumer-procurement/docs/commerce_consumer_procurement_v1/license_management_service.rst new file mode 100644 index 000000000000..d08a71e7aec0 --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/docs/commerce_consumer_procurement_v1/license_management_service.rst @@ -0,0 +1,10 @@ +LicenseManagementService +------------------------------------------ + +.. 
automodule:: google.cloud.commerce_consumer_procurement_v1.services.license_management_service + :members: + :inherited-members: + +.. automodule:: google.cloud.commerce_consumer_procurement_v1.services.license_management_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-commerce-consumer-procurement/docs/commerce_consumer_procurement_v1/services_.rst b/packages/google-cloud-commerce-consumer-procurement/docs/commerce_consumer_procurement_v1/services_.rst index d5e8b5f12ecb..8d66166cebbe 100644 --- a/packages/google-cloud-commerce-consumer-procurement/docs/commerce_consumer_procurement_v1/services_.rst +++ b/packages/google-cloud-commerce-consumer-procurement/docs/commerce_consumer_procurement_v1/services_.rst @@ -4,3 +4,4 @@ Services for Google Cloud Commerce Consumer Procurement v1 API :maxdepth: 2 consumer_procurement_service + license_management_service diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement/__init__.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement/__init__.py index f271433c727b..bba3576813f8 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement/__init__.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement/__init__.py @@ -24,6 +24,25 @@ from google.cloud.commerce_consumer_procurement_v1.services.consumer_procurement_service.client import ( ConsumerProcurementServiceClient, ) +from google.cloud.commerce_consumer_procurement_v1.services.license_management_service.async_client import ( + LicenseManagementServiceAsyncClient, +) +from google.cloud.commerce_consumer_procurement_v1.services.license_management_service.client import ( + LicenseManagementServiceClient, +) +from google.cloud.commerce_consumer_procurement_v1.types.license_management_service import ( + AssignmentProtocol, + AssignRequest, + 
AssignResponse, + EnumerateLicensedUsersRequest, + EnumerateLicensedUsersResponse, + GetLicensePoolRequest, + LicensedUser, + LicensePool, + UnassignRequest, + UnassignResponse, + UpdateLicensePoolRequest, +) from google.cloud.commerce_consumer_procurement_v1.types.order import ( LineItem, LineItemChange, @@ -36,9 +55,14 @@ Subscription, ) from google.cloud.commerce_consumer_procurement_v1.types.procurement_service import ( + AutoRenewalBehavior, + CancelOrderMetadata, + CancelOrderRequest, GetOrderRequest, ListOrdersRequest, ListOrdersResponse, + ModifyOrderMetadata, + ModifyOrderRequest, PlaceOrderMetadata, PlaceOrderRequest, ) @@ -46,6 +70,19 @@ __all__ = ( "ConsumerProcurementServiceClient", "ConsumerProcurementServiceAsyncClient", + "LicenseManagementServiceClient", + "LicenseManagementServiceAsyncClient", + "AssignmentProtocol", + "AssignRequest", + "AssignResponse", + "EnumerateLicensedUsersRequest", + "EnumerateLicensedUsersResponse", + "GetLicensePoolRequest", + "LicensedUser", + "LicensePool", + "UnassignRequest", + "UnassignResponse", + "UpdateLicensePoolRequest", "LineItem", "LineItemChange", "LineItemInfo", @@ -55,9 +92,14 @@ "LineItemChangeState", "LineItemChangeStateReasonType", "LineItemChangeType", + "CancelOrderMetadata", + "CancelOrderRequest", "GetOrderRequest", "ListOrdersRequest", "ListOrdersResponse", + "ModifyOrderMetadata", + "ModifyOrderRequest", "PlaceOrderMetadata", "PlaceOrderRequest", + "AutoRenewalBehavior", ) diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement/gapic_version.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement/gapic_version.py index 558c8aab67c5..ec8d212c9160 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement/gapic_version.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement/gapic_version.py @@ -13,4 +13,4 @@ # 
See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.8" # {x-release-please-version} diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/__init__.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/__init__.py index d1a4fa34b7d8..2a6c2e07c4ab 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/__init__.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/__init__.py @@ -24,6 +24,23 @@ ConsumerProcurementServiceAsyncClient, ConsumerProcurementServiceClient, ) +from .services.license_management_service import ( + LicenseManagementServiceAsyncClient, + LicenseManagementServiceClient, +) +from .types.license_management_service import ( + AssignmentProtocol, + AssignRequest, + AssignResponse, + EnumerateLicensedUsersRequest, + EnumerateLicensedUsersResponse, + GetLicensePoolRequest, + LicensedUser, + LicensePool, + UnassignRequest, + UnassignResponse, + UpdateLicensePoolRequest, +) from .types.order import ( LineItem, LineItemChange, @@ -36,17 +53,35 @@ Subscription, ) from .types.procurement_service import ( + AutoRenewalBehavior, + CancelOrderMetadata, + CancelOrderRequest, GetOrderRequest, ListOrdersRequest, ListOrdersResponse, + ModifyOrderMetadata, + ModifyOrderRequest, PlaceOrderMetadata, PlaceOrderRequest, ) __all__ = ( "ConsumerProcurementServiceAsyncClient", + "LicenseManagementServiceAsyncClient", + "AssignRequest", + "AssignResponse", + "AssignmentProtocol", + "AutoRenewalBehavior", + "CancelOrderMetadata", + "CancelOrderRequest", "ConsumerProcurementServiceClient", + "EnumerateLicensedUsersRequest", + "EnumerateLicensedUsersResponse", + "GetLicensePoolRequest", "GetOrderRequest", + "LicenseManagementServiceClient", + 
"LicensePool", + "LicensedUser", "LineItem", "LineItemChange", "LineItemChangeState", @@ -55,9 +90,14 @@ "LineItemInfo", "ListOrdersRequest", "ListOrdersResponse", + "ModifyOrderMetadata", + "ModifyOrderRequest", "Order", "Parameter", "PlaceOrderMetadata", "PlaceOrderRequest", "Subscription", + "UnassignRequest", + "UnassignResponse", + "UpdateLicensePoolRequest", ) diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/gapic_metadata.json b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/gapic_metadata.json index 638c161ad386..e11a84a7c315 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/gapic_metadata.json +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/gapic_metadata.json @@ -10,6 +10,11 @@ "grpc": { "libraryClient": "ConsumerProcurementServiceClient", "rpcs": { + "CancelOrder": { + "methods": [ + "cancel_order" + ] + }, "GetOrder": { "methods": [ "get_order" @@ -20,6 +25,11 @@ "list_orders" ] }, + "ModifyOrder": { + "methods": [ + "modify_order" + ] + }, "PlaceOrder": { "methods": [ "place_order" @@ -30,6 +40,11 @@ "grpc-async": { "libraryClient": "ConsumerProcurementServiceAsyncClient", "rpcs": { + "CancelOrder": { + "methods": [ + "cancel_order" + ] + }, "GetOrder": { "methods": [ "get_order" @@ -40,6 +55,11 @@ "list_orders" ] }, + "ModifyOrder": { + "methods": [ + "modify_order" + ] + }, "PlaceOrder": { "methods": [ "place_order" @@ -50,6 +70,11 @@ "rest": { "libraryClient": "ConsumerProcurementServiceClient", "rpcs": { + "CancelOrder": { + "methods": [ + "cancel_order" + ] + }, "GetOrder": { "methods": [ "get_order" @@ -60,6 +85,11 @@ "list_orders" ] }, + "ModifyOrder": { + "methods": [ + "modify_order" + ] + }, "PlaceOrder": { "methods": [ "place_order" @@ -68,6 +98,100 @@ } } } + }, + "LicenseManagementService": { + "clients": { + 
"grpc": { + "libraryClient": "LicenseManagementServiceClient", + "rpcs": { + "Assign": { + "methods": [ + "assign" + ] + }, + "EnumerateLicensedUsers": { + "methods": [ + "enumerate_licensed_users" + ] + }, + "GetLicensePool": { + "methods": [ + "get_license_pool" + ] + }, + "Unassign": { + "methods": [ + "unassign" + ] + }, + "UpdateLicensePool": { + "methods": [ + "update_license_pool" + ] + } + } + }, + "grpc-async": { + "libraryClient": "LicenseManagementServiceAsyncClient", + "rpcs": { + "Assign": { + "methods": [ + "assign" + ] + }, + "EnumerateLicensedUsers": { + "methods": [ + "enumerate_licensed_users" + ] + }, + "GetLicensePool": { + "methods": [ + "get_license_pool" + ] + }, + "Unassign": { + "methods": [ + "unassign" + ] + }, + "UpdateLicensePool": { + "methods": [ + "update_license_pool" + ] + } + } + }, + "rest": { + "libraryClient": "LicenseManagementServiceClient", + "rpcs": { + "Assign": { + "methods": [ + "assign" + ] + }, + "EnumerateLicensedUsers": { + "methods": [ + "enumerate_licensed_users" + ] + }, + "GetLicensePool": { + "methods": [ + "get_license_pool" + ] + }, + "Unassign": { + "methods": [ + "unassign" + ] + }, + "UpdateLicensePool": { + "methods": [ + "update_license_pool" + ] + } + } + } + } } } } diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/gapic_version.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/gapic_version.py index 558c8aab67c5..ec8d212c9160 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/gapic_version.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.8" # {x-release-please-version} diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/async_client.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/async_client.py index ba83a537babe..4b4132787111 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/async_client.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/async_client.py @@ -658,6 +658,230 @@ async def sample_list_orders(): # Done; return the response. return response + async def modify_order( + self, + request: Optional[Union[procurement_service.ModifyOrderRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Modifies an existing + [Order][google.cloud.commerce.consumer.procurement.v1.Order] + resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import commerce_consumer_procurement_v1 + + async def sample_modify_order(): + # Create a client + client = commerce_consumer_procurement_v1.ConsumerProcurementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.ModifyOrderRequest( + name="name_value", + ) + + # Make the request + operation = client.modify_order(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.commerce_consumer_procurement_v1.types.ModifyOrderRequest, dict]]): + The request object. Request message for + [ConsumerProcurementService.ModifyOrder][google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.ModifyOrder]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.commerce_consumer_procurement_v1.types.Order` Represents a purchase made by a customer on Cloud Marketplace. + Creating an order makes sure that both the Google + backend systems as well as external service + provider's systems (if needed) allow use of purchased + products and ensures the appropriate billing events + occur. + + An Order can be made against one Product with + multiple add-ons (optional) or one Quote which might + reference multiple products. 
+ + Customers typically choose a price plan for each + Product purchased when they create an order and can + change their plan later, if the product allows. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, procurement_service.ModifyOrderRequest): + request = procurement_service.ModifyOrderRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.modify_order + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + order.Order, + metadata_type=procurement_service.ModifyOrderMetadata, + ) + + # Done; return the response. + return response + + async def cancel_order( + self, + request: Optional[Union[procurement_service.CancelOrderRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Cancels an existing + [Order][google.cloud.commerce.consumer.procurement.v1.Order]. + Every product procured in the Order will be cancelled. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import commerce_consumer_procurement_v1 + + async def sample_cancel_order(): + # Create a client + client = commerce_consumer_procurement_v1.ConsumerProcurementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.CancelOrderRequest( + name="name_value", + ) + + # Make the request + operation = client.cancel_order(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.commerce_consumer_procurement_v1.types.CancelOrderRequest, dict]]): + The request object. Request message for + [ConsumerProcurementService.CancelOrder][google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.CancelOrder]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.commerce_consumer_procurement_v1.types.Order` Represents a purchase made by a customer on Cloud Marketplace. + Creating an order makes sure that both the Google + backend systems as well as external service + provider's systems (if needed) allow use of purchased + products and ensures the appropriate billing events + occur. 
+ + An Order can be made against one Product with + multiple add-ons (optional) or one Quote which might + reference multiple products. + + Customers typically choose a price plan for each + Product purchased when they create an order and can + change their plan later, if the product allows. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, procurement_service.CancelOrderRequest): + request = procurement_service.CancelOrderRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.cancel_order + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + order.Order, + metadata_type=procurement_service.CancelOrderMetadata, + ) + + # Done; return the response. 
+ return response + async def get_operation( self, request: Optional[operations_pb2.GetOperationRequest] = None, diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/client.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/client.py index be7ec242cec1..525ad9877370 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/client.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/client.py @@ -1082,6 +1082,226 @@ def sample_list_orders(): # Done; return the response. return response + def modify_order( + self, + request: Optional[Union[procurement_service.ModifyOrderRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Modifies an existing + [Order][google.cloud.commerce.consumer.procurement.v1.Order] + resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import commerce_consumer_procurement_v1 + + def sample_modify_order(): + # Create a client + client = commerce_consumer_procurement_v1.ConsumerProcurementServiceClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.ModifyOrderRequest( + name="name_value", + ) + + # Make the request + operation = client.modify_order(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.commerce_consumer_procurement_v1.types.ModifyOrderRequest, dict]): + The request object. Request message for + [ConsumerProcurementService.ModifyOrder][google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.ModifyOrder]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.commerce_consumer_procurement_v1.types.Order` Represents a purchase made by a customer on Cloud Marketplace. + Creating an order makes sure that both the Google + backend systems as well as external service + provider's systems (if needed) allow use of purchased + products and ensures the appropriate billing events + occur. + + An Order can be made against one Product with + multiple add-ons (optional) or one Quote which might + reference multiple products. 
+ + Customers typically choose a price plan for each + Product purchased when they create an order and can + change their plan later, if the product allows. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, procurement_service.ModifyOrderRequest): + request = procurement_service.ModifyOrderRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.modify_order] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + order.Order, + metadata_type=procurement_service.ModifyOrderMetadata, + ) + + # Done; return the response. + return response + + def cancel_order( + self, + request: Optional[Union[procurement_service.CancelOrderRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Cancels an existing + [Order][google.cloud.commerce.consumer.procurement.v1.Order]. + Every product procured in the Order will be cancelled. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import commerce_consumer_procurement_v1 + + def sample_cancel_order(): + # Create a client + client = commerce_consumer_procurement_v1.ConsumerProcurementServiceClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.CancelOrderRequest( + name="name_value", + ) + + # Make the request + operation = client.cancel_order(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.commerce_consumer_procurement_v1.types.CancelOrderRequest, dict]): + The request object. Request message for + [ConsumerProcurementService.CancelOrder][google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.CancelOrder]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.commerce_consumer_procurement_v1.types.Order` Represents a purchase made by a customer on Cloud Marketplace. + Creating an order makes sure that both the Google + backend systems as well as external service + provider's systems (if needed) allow use of purchased + products and ensures the appropriate billing events + occur. + + An Order can be made against one Product with + multiple add-ons (optional) or one Quote which might + reference multiple products. 
+ + Customers typically choose a price plan for each + Product purchased when they create an order and can + change their plan later, if the product allows. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, procurement_service.CancelOrderRequest): + request = procurement_service.CancelOrderRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_order] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + order.Order, + metadata_type=procurement_service.CancelOrderMetadata, + ) + + # Done; return the response. 
+ return response + def __enter__(self) -> "ConsumerProcurementServiceClient": return self diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/base.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/base.py index 405ae9789b33..b7df5231e9d5 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/base.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/base.py @@ -167,6 +167,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.modify_order: gapic_v1.method.wrap_method( + self.modify_order, + default_timeout=None, + client_info=client_info, + ), + self.cancel_order: gapic_v1.method.wrap_method( + self.cancel_order, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -213,6 +223,24 @@ def list_orders( ]: raise NotImplementedError() + @property + def modify_order( + self, + ) -> Callable[ + [procurement_service.ModifyOrderRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_order( + self, + ) -> Callable[ + [procurement_service.CancelOrderRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def get_operation( self, diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/grpc.py 
b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/grpc.py index 25e976eeed36..307d720364fa 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/grpc.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/grpc.py @@ -353,6 +353,62 @@ def list_orders( ) return self._stubs["list_orders"] + @property + def modify_order( + self, + ) -> Callable[[procurement_service.ModifyOrderRequest], operations_pb2.Operation]: + r"""Return a callable for the modify order method over gRPC. + + Modifies an existing + [Order][google.cloud.commerce.consumer.procurement.v1.Order] + resource. + + Returns: + Callable[[~.ModifyOrderRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "modify_order" not in self._stubs: + self._stubs["modify_order"] = self.grpc_channel.unary_unary( + "/google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService/ModifyOrder", + request_serializer=procurement_service.ModifyOrderRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["modify_order"] + + @property + def cancel_order( + self, + ) -> Callable[[procurement_service.CancelOrderRequest], operations_pb2.Operation]: + r"""Return a callable for the cancel order method over gRPC. + + Cancels an existing + [Order][google.cloud.commerce.consumer.procurement.v1.Order]. + Every product procured in the Order will be cancelled. 
+ + Returns: + Callable[[~.CancelOrderRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_order" not in self._stubs: + self._stubs["cancel_order"] = self.grpc_channel.unary_unary( + "/google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService/CancelOrder", + request_serializer=procurement_service.CancelOrderRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["cancel_order"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/grpc_asyncio.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/grpc_asyncio.py index fb34a2b76187..0cdba90bcd6f 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/grpc_asyncio.py @@ -366,6 +366,66 @@ def list_orders( ) return self._stubs["list_orders"] + @property + def modify_order( + self, + ) -> Callable[ + [procurement_service.ModifyOrderRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the modify order method over gRPC. + + Modifies an existing + [Order][google.cloud.commerce.consumer.procurement.v1.Order] + resource. 
+ + Returns: + Callable[[~.ModifyOrderRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "modify_order" not in self._stubs: + self._stubs["modify_order"] = self.grpc_channel.unary_unary( + "/google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService/ModifyOrder", + request_serializer=procurement_service.ModifyOrderRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["modify_order"] + + @property + def cancel_order( + self, + ) -> Callable[ + [procurement_service.CancelOrderRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the cancel order method over gRPC. + + Cancels an existing + [Order][google.cloud.commerce.consumer.procurement.v1.Order]. + Every product procured in the Order will be cancelled. + + Returns: + Callable[[~.CancelOrderRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_order" not in self._stubs: + self._stubs["cancel_order"] = self.grpc_channel.unary_unary( + "/google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService/CancelOrder", + request_serializer=procurement_service.CancelOrderRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["cancel_order"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -402,6 +462,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.modify_order: gapic_v1.method_async.wrap_method( + self.modify_order, + default_timeout=None, + client_info=client_info, + ), + self.cancel_order: gapic_v1.method_async.wrap_method( + self.cancel_order, + default_timeout=None, + client_info=client_info, + ), } def close(self): diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/rest.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/rest.py index 353e9db69b53..16459934f854 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/rest.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/rest.py @@ -74,6 +74,14 @@ class ConsumerProcurementServiceRestInterceptor: .. 
code-block:: python class MyCustomConsumerProcurementServiceInterceptor(ConsumerProcurementServiceRestInterceptor): + def pre_cancel_order(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_cancel_order(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_order(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -90,6 +98,14 @@ def post_list_orders(self, response): logging.log(f"Received response: {response}") return response + def pre_modify_order(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_modify_order(self, response): + logging.log(f"Received response: {response}") + return response + def pre_place_order(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -104,6 +120,29 @@ def post_place_order(self, response): """ + def pre_cancel_order( + self, + request: procurement_service.CancelOrderRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[procurement_service.CancelOrderRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_order + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConsumerProcurementService server. + """ + return request, metadata + + def post_cancel_order( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for cancel_order + + Override in a subclass to manipulate the response + after it is returned by the ConsumerProcurementService server but before + it is returned to user code. 
+ """ + return response + def pre_get_order( self, request: procurement_service.GetOrderRequest, @@ -148,6 +187,29 @@ def post_list_orders( """ return response + def pre_modify_order( + self, + request: procurement_service.ModifyOrderRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[procurement_service.ModifyOrderRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for modify_order + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConsumerProcurementService server. + """ + return request, metadata + + def post_modify_order( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for modify_order + + Override in a subclass to manipulate the response + after it is returned by the ConsumerProcurementService server but before + it is returned to user code. + """ + return response + def pre_place_order( self, request: procurement_service.PlaceOrderRequest, @@ -333,6 +395,100 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Return the client from cache. return self._operations_client + class _CancelOrder(ConsumerProcurementServiceRestStub): + def __hash__(self): + return hash("CancelOrder") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: procurement_service.CancelOrderRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the cancel order method over HTTP. + + Args: + request (~.procurement_service.CancelOrderRequest): + The request object. 
Request message for + [ConsumerProcurementService.CancelOrder][google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.CancelOrder]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=billingAccounts/*/orders/*}:cancel", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_cancel_order(request, metadata) + pb_request = procurement_service.CancelOrderRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_cancel_order(resp) + return resp + class _GetOrder(ConsumerProcurementServiceRestStub): def __hash__(self): return hash("GetOrder") @@ -525,6 +681,100 @@ def __call__( resp = self._interceptor.post_list_orders(resp) return resp + class _ModifyOrder(ConsumerProcurementServiceRestStub): + def __hash__(self): + return hash("ModifyOrder") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: procurement_service.ModifyOrderRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the modify order method over HTTP. + + Args: + request (~.procurement_service.ModifyOrderRequest): + The request object. Request message for + [ConsumerProcurementService.ModifyOrder][google.cloud.commerce.consumer.procurement.v1.ConsumerProcurementService.ModifyOrder]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=billingAccounts/*/orders/*}:modify", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_modify_order(request, metadata) + pb_request = procurement_service.ModifyOrderRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_modify_order(resp) + return resp + class _PlaceOrder(ConsumerProcurementServiceRestStub): def __hash__(self): return hash("PlaceOrder") @@ -619,6 +869,14 @@ def __call__( resp = self._interceptor.post_place_order(resp) return resp + @property + def cancel_order( + self, + ) -> Callable[[procurement_service.CancelOrderRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CancelOrder(self._session, self._host, self._interceptor) # type: ignore + @property def get_order(self) -> Callable[[procurement_service.GetOrderRequest], order.Order]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. @@ -635,6 +893,14 @@ def list_orders( # In C++ this would require a dynamic_cast return self._ListOrders(self._session, self._host, self._interceptor) # type: ignore + @property + def modify_order( + self, + ) -> Callable[[procurement_service.ModifyOrderRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ModifyOrder(self._session, self._host, self._interceptor) # type: ignore + @property def place_order( self, diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/__init__.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/__init__.py new file mode 100644 index 000000000000..d1b19f8f83bc --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import LicenseManagementServiceAsyncClient +from .client import LicenseManagementServiceClient + +__all__ = ( + "LicenseManagementServiceClient", + "LicenseManagementServiceAsyncClient", +) diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/async_client.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/async_client.py new file mode 100644 index 000000000000..015a76c88aef --- /dev/null +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/async_client.py @@ -0,0 +1,925 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.commerce_consumer_procurement_v1 import ( + gapic_version as package_version, +) + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore + +from google.cloud.commerce_consumer_procurement_v1.services.license_management_service import ( + pagers, +) +from google.cloud.commerce_consumer_procurement_v1.types import ( + license_management_service, +) + +from .client import LicenseManagementServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, LicenseManagementServiceTransport +from .transports.grpc_asyncio import LicenseManagementServiceGrpcAsyncIOTransport + + +class LicenseManagementServiceAsyncClient: + """Service for managing licenses.""" + + _client: LicenseManagementServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = LicenseManagementServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = LicenseManagementServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ( + LicenseManagementServiceClient._DEFAULT_ENDPOINT_TEMPLATE + ) + _DEFAULT_UNIVERSE = LicenseManagementServiceClient._DEFAULT_UNIVERSE + + license_pool_path = staticmethod(LicenseManagementServiceClient.license_pool_path) + parse_license_pool_path = staticmethod( + LicenseManagementServiceClient.parse_license_pool_path + ) + common_billing_account_path = staticmethod( + LicenseManagementServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + LicenseManagementServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(LicenseManagementServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + LicenseManagementServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + LicenseManagementServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + LicenseManagementServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod( + LicenseManagementServiceClient.common_project_path + ) + parse_common_project_path = staticmethod( + LicenseManagementServiceClient.parse_common_project_path + ) + common_location_path = staticmethod( + LicenseManagementServiceClient.common_location_path + ) + parse_common_location_path = staticmethod( + LicenseManagementServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LicenseManagementServiceAsyncClient: The constructed client. 
+ """ + return LicenseManagementServiceClient.from_service_account_info.__func__(LicenseManagementServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LicenseManagementServiceAsyncClient: The constructed client. + """ + return LicenseManagementServiceClient.from_service_account_file.__func__(LicenseManagementServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. 
Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return LicenseManagementServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> LicenseManagementServiceTransport: + """Returns the transport used by the client instance. + + Returns: + LicenseManagementServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = LicenseManagementServiceClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + LicenseManagementServiceTransport, + Callable[..., LicenseManagementServiceTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the license management service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Optional[Union[str,LicenseManagementServiceTransport,Callable[..., LicenseManagementServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the LicenseManagementServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = LicenseManagementServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def get_license_pool( + self, + request: Optional[ + Union[license_management_service.GetLicensePoolRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> license_management_service.LicensePool: + r"""Gets the license pool. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import commerce_consumer_procurement_v1 + + async def sample_get_license_pool(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.GetLicensePoolRequest( + name="name_value", + ) + + # Make the request + response = await client.get_license_pool(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.commerce_consumer_procurement_v1.types.GetLicensePoolRequest, dict]]): + The request object. Request message for getting a license + pool. + name (:class:`str`): + Required. The name of the license pool to get. 
Format: + ``billingAccounts/{billing_account}/orders/{order}/licensePool`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.commerce_consumer_procurement_v1.types.LicensePool: + A license pool represents a pool of + licenses that can be assigned to users. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, license_management_service.GetLicensePoolRequest): + request = license_management_service.GetLicensePoolRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_license_pool + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_license_pool( + self, + request: Optional[ + Union[license_management_service.UpdateLicensePoolRequest, dict] + ] = None, + *, + license_pool: Optional[license_management_service.LicensePool] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> license_management_service.LicensePool: + r"""Updates the license pool if one exists for this + Order. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import commerce_consumer_procurement_v1 + + async def sample_update_license_pool(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.UpdateLicensePoolRequest( + ) + + # Make the request + response = await client.update_license_pool(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.commerce_consumer_procurement_v1.types.UpdateLicensePoolRequest, dict]]): + The request object. Request message for updating a + license pool. + license_pool (:class:`google.cloud.commerce_consumer_procurement_v1.types.LicensePool`): + Required. The license pool to update. + + The license pool's name field is used to identify the + license pool to update. 
Format: + ``billingAccounts/{billing_account}/orders/{order}/licensePool``. + + This corresponds to the ``license_pool`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.commerce_consumer_procurement_v1.types.LicensePool: + A license pool represents a pool of + licenses that can be assigned to users. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([license_pool, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, license_management_service.UpdateLicensePoolRequest): + request = license_management_service.UpdateLicensePoolRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if license_pool is not None: + request.license_pool = license_pool + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_license_pool + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("license_pool.name", request.license_pool.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def assign( + self, + request: Optional[Union[license_management_service.AssignRequest, dict]] = None, + *, + parent: Optional[str] = None, + usernames: Optional[MutableSequence[str]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> license_management_service.AssignResponse: + r"""Assigns a license to a user. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import commerce_consumer_procurement_v1 + + async def sample_assign(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.AssignRequest( + parent="parent_value", + usernames=['usernames_value1', 'usernames_value2'], + ) + + # Make the request + response = await client.assign(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.commerce_consumer_procurement_v1.types.AssignRequest, dict]]): + The request object. Request message for + [LicenseManagementService.Assign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Assign]. + parent (:class:`str`): + Required. License pool name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + usernames (:class:`MutableSequence[str]`): + Required. Username. Format: ``name@domain.com``. + This corresponds to the ``usernames`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.commerce_consumer_procurement_v1.types.AssignResponse: + Response message for + [LicenseManagementService.Assign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Assign]. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, usernames]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, license_management_service.AssignRequest): + request = license_management_service.AssignRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if usernames: + request.usernames.extend(usernames) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.assign] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def unassign( + self, + request: Optional[ + Union[license_management_service.UnassignRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + usernames: Optional[MutableSequence[str]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> license_management_service.UnassignResponse: + r"""Unassigns a license from a user. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import commerce_consumer_procurement_v1 + + async def sample_unassign(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.UnassignRequest( + parent="parent_value", + usernames=['usernames_value1', 'usernames_value2'], + ) + + # Make the request + response = await client.unassign(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.commerce_consumer_procurement_v1.types.UnassignRequest, dict]]): + The request object. Request message for + [LicenseManagementService.Unassign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Unassign]. + parent (:class:`str`): + Required. License pool name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + usernames (:class:`MutableSequence[str]`): + Required. Username. Format: ``name@domain.com``. + This corresponds to the ``usernames`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.commerce_consumer_procurement_v1.types.UnassignResponse: + Response message for + [LicenseManagementService.Unassign][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.Unassign]. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, usernames]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, license_management_service.UnassignRequest): + request = license_management_service.UnassignRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if usernames: + request.usernames.extend(usernames) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.unassign] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def enumerate_licensed_users( + self, + request: Optional[ + Union[license_management_service.EnumerateLicensedUsersRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.EnumerateLicensedUsersAsyncPager: + r"""Enumerates all users assigned a license. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import commerce_consumer_procurement_v1 + + async def sample_enumerate_licensed_users(): + # Create a client + client = commerce_consumer_procurement_v1.LicenseManagementServiceAsyncClient() + + # Initialize request argument(s) + request = commerce_consumer_procurement_v1.EnumerateLicensedUsersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.enumerate_licensed_users(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.commerce_consumer_procurement_v1.types.EnumerateLicensedUsersRequest, dict]]): + The request object. Request message for + [LicenseManagementService.EnumerateLicensedUsers][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.EnumerateLicensedUsers]. + parent (:class:`str`): + Required. License pool name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.commerce_consumer_procurement_v1.services.license_management_service.pagers.EnumerateLicensedUsersAsyncPager: + Response message for + [LicenseManagementService.EnumerateLicensedUsers][google.cloud.commerce.consumer.procurement.v1.LicenseManagementService.EnumerateLicensedUsers]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, license_management_service.EnumerateLicensedUsersRequest + ): + request = license_management_service.EnumerateLicensedUsersRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.enumerate_licensed_users + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. 
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.EnumerateLicensedUsersAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "LicenseManagementServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("LicenseManagementServiceAsyncClient",) diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/line_item_service/client.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/client.py similarity index 60% rename from packages/google-ads-admanager/google/ads/admanager_v1/services/line_item_service/client.py rename to packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/client.py index bcd2fdb6f8ea..5c8aee6b83e5 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/line_item_service/client.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/client.py @@ -41,7 +41,9 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.oauth2 import service_account # type: ignore -from google.ads.admanager_v1 import gapic_version as package_version +from google.cloud.commerce_consumer_procurement_v1 import ( + gapic_version as package_version, +) try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -49,25 +51,23 @@ OptionalRetry = Union[retries.Retry, object, None] # type: ignore from 
google.longrunning import operations_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.type import money_pb2 # type: ignore - -from google.ads.admanager_v1.services.line_item_service import pagers -from google.ads.admanager_v1.types import ( - computed_status_enum, - creative_placeholder, - environment_type_enum, - goal, - line_item_enums, - line_item_service, +from google.protobuf import field_mask_pb2 # type: ignore + +from google.cloud.commerce_consumer_procurement_v1.services.license_management_service import ( + pagers, +) +from google.cloud.commerce_consumer_procurement_v1.types import ( + license_management_service, ) -from .transports.base import DEFAULT_CLIENT_INFO, LineItemServiceTransport -from .transports.rest import LineItemServiceRestTransport +from .transports.base import DEFAULT_CLIENT_INFO, LicenseManagementServiceTransport +from .transports.grpc import LicenseManagementServiceGrpcTransport +from .transports.grpc_asyncio import LicenseManagementServiceGrpcAsyncIOTransport +from .transports.rest import LicenseManagementServiceRestTransport -class LineItemServiceClientMeta(type): - """Metaclass for the LineItemService client. +class LicenseManagementServiceClientMeta(type): + """Metaclass for the LicenseManagementService client. This provides class-level methods for building and retrieving support objects (e.g. 
transport) without polluting the client instance @@ -76,13 +76,15 @@ class LineItemServiceClientMeta(type): _transport_registry = ( OrderedDict() - ) # type: Dict[str, Type[LineItemServiceTransport]] - _transport_registry["rest"] = LineItemServiceRestTransport + ) # type: Dict[str, Type[LicenseManagementServiceTransport]] + _transport_registry["grpc"] = LicenseManagementServiceGrpcTransport + _transport_registry["grpc_asyncio"] = LicenseManagementServiceGrpcAsyncIOTransport + _transport_registry["rest"] = LicenseManagementServiceRestTransport def get_transport_class( cls, label: Optional[str] = None, - ) -> Type[LineItemServiceTransport]: + ) -> Type[LicenseManagementServiceTransport]: """Returns an appropriate transport class. Args: @@ -101,8 +103,8 @@ def get_transport_class( return next(iter(cls._transport_registry.values())) -class LineItemServiceClient(metaclass=LineItemServiceClientMeta): - """Provides methods for handling LineItem objects.""" +class LicenseManagementServiceClient(metaclass=LicenseManagementServiceClientMeta): + """Service for managing licenses.""" @staticmethod def _get_default_mtls_endpoint(api_endpoint): @@ -135,12 +137,12 @@ def _get_default_mtls_endpoint(api_endpoint): return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = "admanager.googleapis.com" + DEFAULT_ENDPOINT = "cloudcommerceconsumerprocurement.googleapis.com" DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore DEFAULT_ENDPOINT ) - _DEFAULT_ENDPOINT_TEMPLATE = "admanager.{UNIVERSE_DOMAIN}" + _DEFAULT_ENDPOINT_TEMPLATE = "cloudcommerceconsumerprocurement.{UNIVERSE_DOMAIN}" _DEFAULT_UNIVERSE = "googleapis.com" @classmethod @@ -154,7 +156,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - LineItemServiceClient: The constructed client. 
+ LicenseManagementServiceClient: The constructed client. """ credentials = service_account.Credentials.from_service_account_info(info) kwargs["credentials"] = credentials @@ -172,7 +174,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - LineItemServiceClient: The constructed client. + LicenseManagementServiceClient: The constructed client. """ credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials @@ -181,71 +183,35 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file @property - def transport(self) -> LineItemServiceTransport: + def transport(self) -> LicenseManagementServiceTransport: """Returns the transport used by the client instance. Returns: - LineItemServiceTransport: The transport used by the client + LicenseManagementServiceTransport: The transport used by the client instance. """ return self._transport @staticmethod - def label_path( - network_code: str, - label: str, - ) -> str: - """Returns a fully-qualified label string.""" - return "networks/{network_code}/labels/{label}".format( - network_code=network_code, - label=label, - ) - - @staticmethod - def parse_label_path(path: str) -> Dict[str, str]: - """Parses a label path into its component segments.""" - m = re.match(r"^networks/(?P.+?)/labels/(?P